diff --git a/.github/.wordlist.txt b/.github/.wordlist.txt index e832e93fd..04da5954e 100644 --- a/.github/.wordlist.txt +++ b/.github/.wordlist.txt @@ -83,4 +83,9 @@ li md pdf ul -WebSocket \ No newline at end of file +WebSocket +cometbft +CometBFT +CometBFT's +distributedcoplianceledger +zigbeealliance \ No newline at end of file diff --git a/.github/actions/setup-molecule/action.yml b/.github/actions/setup-molecule/action.yml index 4c12633d7..6de1b4508 100644 --- a/.github/actions/setup-molecule/action.yml +++ b/.github/actions/setup-molecule/action.yml @@ -24,7 +24,7 @@ runs: steps: - uses: actions/setup-python@v3 with: - python-version: '3.x' + python-version: '3.10' - name: install dependencies shell: bash run: pip install -r deployment/test-requirements.txt diff --git a/.github/workflows/on-demand_molecule.yml b/.github/workflows/on-demand_molecule.yml index 9f1acde81..345041d5b 100644 --- a/.github/workflows/on-demand_molecule.yml +++ b/.github/workflows/on-demand_molecule.yml @@ -18,13 +18,13 @@ name: Check All Molecule tests on: push: branches: - - master + - dev paths: - deployment/test-requirements.txt - deployment/ansible/roles/** pull_request: branches: - - master + - dev paths: - deployment/test-requirements.txt - deployment/ansible/roles/** diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index b6a2144b8..7bc332258 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -129,9 +129,9 @@ jobs: runs-on: ${{ matrix.os }} steps: - name: Set up Go 1.x - uses: actions/setup-go@v3 + uses: actions/setup-go@v4 with: - go-version: ^1.16 + go-version: ^1.20 - name: Check out code into the Go module directory uses: actions/checkout@v2 diff --git a/.github/workflows/verify.yml b/.github/workflows/verify.yml index 43ac1992a..6bebdf312 100644 --- a/.github/workflows/verify.yml +++ b/.github/workflows/verify.yml @@ -56,8 +56,8 @@ jobs: if: needs.changes.outputs.workflows == 'true' name: Run actionlint tool to 
verify lint issues in GitHub actions runs-on: ubuntu-latest - needs: - - changes +# needs: +# - changes steps: - uses: actions/checkout@master - uses: reviewdog/action-actionlint@v1 @@ -85,9 +85,9 @@ jobs: needs: - lint steps: - - uses: actions/setup-go@v3 + - uses: actions/setup-go@v4 with: - go-version: ^1.17 + go-version: ^1.20 - uses: actions/checkout@v3 - uses: actions/cache@v3 if: ${{ !env.ACT }} @@ -111,9 +111,9 @@ jobs: needs: - changes steps: - - uses: actions/setup-go@v3 + - uses: actions/setup-go@v4 with: - go-version: ^1.17 + go-version: ^1.20 - uses: actions/checkout@v3 - uses: actions/cache@v3 with: @@ -134,13 +134,13 @@ jobs: - changes steps: - uses: actions/checkout@v3 - - uses: actions/setup-go@v3 + - uses: actions/setup-go@v4 with: - go-version: 1.17 + go-version: 1.21 - name: golangci-lint - uses: golangci/golangci-lint-action@v3.6.0 + uses: golangci/golangci-lint-action@v3 with: - version: v1.46.0 + version: v1.55 args: --timeout 5m0s check: if: always() diff --git a/.golangci.yml b/.golangci.yml index defc6bc77..2b81712f5 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -50,3 +50,34 @@ linters: - nonamedreturns - exhaustruct - gofumpt + - depguard + - deadcode + - godot + - inamedparam + - goconst + - nosnakecase + - perfsprint + - unparam + - revive + +issues: + exclude-rules: + - path: _test\.go$ + linters: + - goconst + - stylecheck + - testifylint + - nosnakecase + - perfsprint + - dupword + - testifylint + - path: helpers\.go$ + linters: + - goconst + - stylecheck + - testifylint + - nosnakecase + - perfsprint + - dupword + - testifylint + diff --git a/Dockerfile b/Dockerfile index 5f75e6d79..a64ecfb24 100644 --- a/Dockerfile +++ b/Dockerfile @@ -22,7 +22,7 @@ FROM ubuntu:20.04 AS builder ARG GO_VERSION -ENV GO_VERSION=1.19.4 +ENV GO_VERSION=1.20 RUN apt-get update --fix-missing RUN apt-get install -y wget git gcc @@ -36,7 +36,7 @@ ENV GOPATH /go ENV PATH $GOPATH/bin:/usr/local/go/bin:$PATH RUN mkdir -p "$GOPATH/src" "$GOPATH/bin" && 
chmod -R 777 "$GOPATH" -RUN go install github.com/cosmos/cosmos-sdk/cosmovisor/cmd/cosmovisor@v1.0.0 +RUN go install github.com/cosmos/cosmos-sdk/cosmovisor/cmd/cosmovisor@v1.3.0 ############################ # STEP 2 build node image diff --git a/Dockerfile-build b/Dockerfile-build index e722e4afb..815069b54 100644 --- a/Dockerfile-build +++ b/Dockerfile-build @@ -20,7 +20,7 @@ FROM ubuntu:20.04 AS builder ARG GO_VERSION -ENV GO_VERSION=1.19.4 +ENV GO_VERSION=1.20 RUN apt-get update RUN apt-get install -y wget git gcc make diff --git a/README-DEV.md b/README-DEV.md index 96851c2a8..faab8115e 100644 --- a/README-DEV.md +++ b/README-DEV.md @@ -153,10 +153,16 @@ Please take into account the following when sending a PR: ## How To Add a new Module or Transaction -- Use [starport](https://github.com/tendermint/starport) command to scaffold the module. - Consider using a docker container built from the provided [Dockerfile](scripts/Dockerfile) to have a predictable version of starport. See [README.md](scripts/README.md). +- Use [ignite v0.27.1](https://github.com/ignite/cli) command to scaffold the module. + Consider using a docker container built from the provided [Dockerfile](scripts/Dockerfile) to have a predictable version of ignite. See [README.md](scripts/README.md). - Have a look at the scripts and commands used for generation of existing modules, messages and CRUD operations and do it in a similar way (for example [PKI module commands](scripts/starport/upgrade-0.44/07.pki_types.sh)). + - + **Note**: + 1. On previous scaffolding of modules the `starport` cli was used instead of `ignite`. + While generating a new module, the command structure will be the same except it must start with `ignite`. + 2. After execution of command `ignite scaffold ...` generated `.proto` files will be added into `proto/distributedcoplianceledger/{module}/{proto}`. + So make sure to put contents of generated proto files into related folders inside `proto/zigbeealliance/..` folder. 
- If a new transaction with a new data in state (key-value) and new queries needs to be created, then both message and CRUD commands need to be executed. - If just a message to update existing state values need to be created, then a message command is enough. - Adjust the generated code @@ -166,19 +172,18 @@ Please take into account the following when sending a PR: - add `(cosmos_proto.scalar) = "cosmos.AddressString"` annotation for all fields with address/account type (such as `signer` or `owner`). - fix types if needed in `proto//.proto` files - Note1: `unit64` will be returned as string if the output is a JSON format. So, it's better to use `uint64` only when it's really `uint64`. - - Note2: for `uint16` type: use `int32` during starport scaffolding, and add custom validation (annotations above) to check the lower and upper bounds. - - Note3: for `uint32` type: use `int32` during starport scaffolding, then replace it by `uint32` in .proto files, re-generate the code and fix compilation errors. - - build proto (for example `starport chain build`). Fix compilation errors if any. + - Note2: for `uint16` type: use `int32` during ignite scaffolding, and add custom validation (annotations above) to check the lower and upper bounds. + - Note3: for `uint32` type: use `int32` during ignite scaffolding, then replace it by `uint32` in .proto files, re-generate the code and fix compilation errors. + - build proto (for example `ignite chain build`). Fix compilation errors if any. - generate openapi docs from proto using (`scripts/dcl-swagger-gen.sh`). It's recommended to run from container built from [Dockerfile](scripts/Dockerfile) - **Note1**: colons (`:`) are part of subject-id in PKI module, but colons are not allowed in gRPC REST URLs by default. `allow_colon_final_segments=true` should be used as a workaround. So, make sure that `runtime.AssumeColonVerbOpt(false)` in `/x/pki/types/query.pb.gw.go`. 
It's usually sufficient to revert the generated changes in `/x/pki/types/query.pb.gw.go`. - - **Note2**: move `compliance_info.pb.go` and `compliance_history_item.pb.go` to `types/compliance` and adjust the import in other places accordingly. It may be easier just to revert changes in all `*.pb.go` files not affected by your changes in `.proto` - - **Note3**: `starport chain build` needs to be called only if you made manual changes in `.proto` files. - There is no need to call `starport chain build` again once all errors and adjustments above are done. It's sufficient just to build the project via usual ways (such as `make build`) + - **Note3**: `ignite chain build` needs to be called only if you made manual changes in `.proto` files. + There is no need to call `ignite chain build` again once all errors and adjustments above are done. It's sufficient just to build the project via usual ways (such as `make build`) - Add static validation for new messages: - Call `validator.Validate(msg)` in `ValidateBasic` methods for all generated messages - Add additional checks to `ValidateBasic` that do not depend on the state (key-value) and order of transactions @@ -195,10 +200,10 @@ Please take into account the following when sending a PR: ## How To Make Changes in Data Model for Existing Modules -- Use [starport](https://github.com/tendermint/starport) command to scaffold the module. - Consider using the provided [Dockerfile](scripts/Dockerfile) to have a predictable version of starport. See [README.md](scripts/README.md). +- Use [ignite](https://github.com/ignite/cli) command to scaffold the module. + Consider using the provided [Dockerfile](scripts/Dockerfile) to have a predictable version of ignite. See [README.md](scripts/README.md). - **Never change `.pb` files manually**. Do the changes in `.proto` files. -- Every time `.proto` files change, re-generate the code (for example `starport chain build`) and fix compilation errors if any. 
+- Every time `.proto` files change, re-generate the code (for example `ignite chain build`) and fix compilation errors if any. - Update openapi docs from proto using (`scripts/dcl-swagger-gen.sh`). It's recommended to run from container built from [Dockerfile](scripts/Dockerfile). - **Note1**: colons (`:`) are part of subject-id in PKI module, but colons are not allowed in gRPC REST URLs by default. `allow_colon_final_segments=true` should be used as a workaround. @@ -206,8 +211,8 @@ Please take into account the following when sending a PR: It's usually sufficient to revert the generated changes in `/x/pki/types/query.pb.gw.go`. - **Note2**: move `compliance_info.pb.go` and `compliance_history_item.pb.go` to `types/compliance` and adjust the import in other places accordingly. It may be easier just to revert changes in all `*.pb.go` files not affected by your changes in `.proto` -- **Note3**: `starport chain build` needs to be called only if you made manual changes in `.proto` files. - There is no need to call `starport chain build` again once all errors and adjustments above are done. It's sufficient just to build the project via usual ways (such as `make build`) +- **Note3**: `ignite chain build` needs to be called only if you made manual changes in `.proto` files. + There is no need to call `ignite chain build` again once all errors and adjustments above are done. 
It's sufficient just to build the project via usual ways (such as `make build`) ## Update Cosmos-sdk Version @@ -221,14 +226,12 @@ Re-generate cosmos base openapi (service API from cosmos exposed in DCL) using [ ./scripts/cosmos-swagger-gen.sh tx ``` -## Update Tendermint Version - -Please note, that we depend on the Tendermint fork -due to hotfixes for and +## Update CometBFT Version +Please note, that we depend on the CometBFT fork https://github.com/zigbee-alliance/cometbft/releases/tag/v0.37.5 +due to hotfixes for https://github.com/tendermint/tendermint/issues/7640 and https://github.com/tendermint/tendermint/issues/7641 required for Light Client Proxy. -Now that fixes are merged to Tendermint master, so check if we still need to depend on the fork. -Also don't forget to update the link to the Tendermint RPC in [Swagger UI](docs/index.html). +Also don't forget to update the link to the CometBFT RPC in [Swagger UI](docs/index.html). ## Other diff --git a/README.md b/README.md index 73c073321..5cba394d3 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,7 @@ DC Ledger is a public permissioned Ledger which can be used for two main use cas More information about use cases can be found in [DC Ledger Overview](./docs/design/DCL-Overview.pdf) and [Use Case Diagrams](docs/use_cases). -DC Ledger is based on [Tendermint](https://tendermint.com/) and [Cosmos SDK](https://cosmos.network/sdk). +DC Ledger is based on [CometBFT](https://cometbft.com/) and [Cosmos SDK](https://cosmos.network/sdk). DC Ledger is a public permissioned ledger in the following sense: @@ -40,7 +40,7 @@ In order to send write transactions to the ledger you need: ### Pool of Nodes -- A network of Tendermint-based validator nodes (Validators and Observers) maintaining the ledger. +- A network of CometBFT-based validator nodes (Validators and Observers) maintaining the ledger. - Every validator node (`dcld` binary) runs DC Ledger application code (based on Cosmos SDK) implementing the use cases. 
- See the proposed deployment in [deployment](docs/deployment.png) and [deployment-detailed](docs/deployment-detailed.png). - See recommended design for DCL MainNet deployment on AWS in [aws deployment](./docs/deployment-design-aws.md) @@ -54,7 +54,7 @@ In order to send write transactions to the ledger you need: - **Private Sentry Node:** a full node to connect other Validator or Sentry nodes only; should not be accessed by clients. - **Public Sentry Node:** a full node to connect other external full nodes (possibly observer nodes). - **Observer Node (ON):** a full node that doesn't participate in consensus. Should be used to receive read/write requests from the clients. -- **Light Client Proxy Node**: doesn't contain a full replication of data. Can be used as a proxy to untrusted Full nodes for single-value query requests sent via CLI or Tendermint RPC. +- **Light Client Proxy Node**: doesn't contain a full replication of data. Can be used as a proxy to untrusted Full nodes for single-value query requests sent via CLI or CometBFT RPC. It will verify all state proofs automatically. - **Seed Node**: provides a list of peers which a node can connect to. @@ -64,7 +64,7 @@ See - [Deployment-detailed](docs/deployment-detailed.png). 
- [Deployment Recommendations](https://github.com/zigbee-alliance/distributed-compliance-ledger/wiki/DCL-MainNet-Deployment) - [Deployment Recommendations for AWS](./docs/deployment-design-aws.md) -- +- - [Run Light Client Proxy](docs/running-light-client-proxy.md) ### Clients @@ -79,7 +79,7 @@ A Light Client Proxy can be connected to multiple nodes and will verify the stat - [CLI](#cli) - [REST](#rest) - [gRPC](#grpc) -- [Tendermint RPC and Light Client](#tendermint-rpc-and-light-client) +- [CometBFT RPC and Light Client](#cometbft-rpc-and-light-client) **Please note, that multi-value queries don't have state proofs support and should be sent to trusted nodes only.** @@ -126,7 +126,7 @@ See [Run local pool](README-DEV.md#run-local-pool) section in [README-DEV.md](RE Should be used if there are no trusted Observer or Validator nodes to connect. -It can be a proxy for CLI or direct requests from code done via Tendermint RPC. +It can be a proxy for CLI or direct requests from code done via CometBFT RPC. Please note, that CLI can use a Light Client proxy only for single-value query requests. A Full Node (Validator or Observer) should be used for multi-value query requests and write requests. @@ -139,7 +139,7 @@ See [Run Light Client Proxy](docs/running-light-client-proxy.md) for details how - **There are no state proofs in REST, so REST queries should be sent to trusted Validator or Observer nodes only.** - OpenAPI specification: . -- Any running node exposes a REST API at port `1317`. See . +- Any running node exposes a REST API at port `1317`. See . - See [transactions](docs/transactions.md) for a full list of endpoints. - REST HTTP(S) queries can be directly used for read requests. See [How to read from the Ledger](docs/transactions.md#how-to-read-from-the-ledger). 
@@ -150,23 +150,23 @@ See [Run Light Client Proxy](docs/running-light-client-proxy.md) for details how ### gRPC - **There are no state proofs in gRPC, so gRPC queries should be sent to trusted Validator or Observer nodes only.** -- Any running node exposes a REST API at port `9090`. See . +- Any running node exposes a REST API at port `9090`. See . - A client code can be generated for all popular languages from the proto files [proto](proto), see . - The generated client code can be used for read and write requests, i.e. generation and signing of transactions See [How to read from the Ledger](docs/transactions.md#how-to-read-from-the-ledger) and [How to write to the Ledger](docs/transactions.md#how-to-write-to-the-ledger) for details. -### Tendermint RPC and Light Client +### CometBFT RPC and Light Client -- Tendermint RPC is exposed by every running node at port `26657`. See . -- Tendermint RPC supports state proofs. Tendermint's Light Client library can be used to verify the state proofs. +- CometBFT RPC is exposed by every running node at port `26657`. See . +- CometBFT RPC supports state proofs. CometBFT's Light Client library can be used to verify the state proofs. So, if Light Client API is used, then it's possible to communicate with non-trusted nodes. - Please note, that multi-value queries don't have state proofs support and should be sent to trusted nodes only. - There are currently no DC Ledger specific API libraries for various platforms and languages, but they may be provided in the future. 
- The following libraries can be used as light clients: - - [Golang Light Client implementation](https://pkg.go.dev/github.com/tendermint/tendermint/lite2) - - [Rust Light Client implementation](https://docs.rs/tendermint-light-client/0.23.3/tendermint_light_client/) -- Refer to [this doc](./docs/tendermint-rpc.md) to see how to [subscribe](./docs/tendermint-rpc.md#subscribe) to a Tendermint WebSocket based events and/or [query](./docs/tendermint-rpc.md#querying-application-components) an application components. + - [Golang Light Client implementation](https://pkg.go.dev/github.com/cometbft/cometbft/light) + - [Rust Light Client implementation](https://docs.rs/cometbft-light-client/0.1.0-alpha.2/cometbft_light_client/) +- Refer to [this doc](./docs/cometbft-rpc.md) to see how to [subscribe](./docs/cometbft-rpc.md#subscribe) to a CometBFT WebSocket based events and/or [query](./docs/cometbft-rpc.md#querying-application-components) an application components. ### Instructions @@ -227,5 +227,5 @@ the following instructions from [how-to.md](docs/how-to.md) can be used for ever - [Running Node](docs/running-node.md) - [Pool Upgrade](docs/pool-upgrade.md) - [Pool Upgrade How To Guide](docs/pool-upgrade-how-to.md) -- [Tendermint](https://tendermint.com/) +- [CometBFT](https://cometbft.com/) - [Cosmos SDK](https://cosmos.network/sdk) diff --git a/app/app.go b/app/app.go index efc845ef7..f012f04f8 100644 --- a/app/app.go +++ b/app/app.go @@ -1,43 +1,47 @@ package app import ( + "encoding/json" "io" - "net/http" "os" "path/filepath" + reflectionv1 "cosmossdk.io/api/cosmos/reflection/v1" + dbm "github.com/cometbft/cometbft-db" + abci "github.com/cometbft/cometbft/abci/types" + "github.com/cometbft/cometbft/libs/log" + tmos "github.com/cometbft/cometbft/libs/os" "github.com/cosmos/cosmos-sdk/baseapp" "github.com/cosmos/cosmos-sdk/client" + nodeservice "github.com/cosmos/cosmos-sdk/client/grpc/node" "github.com/cosmos/cosmos-sdk/client/grpc/tmservice" - 
"github.com/cosmos/cosmos-sdk/client/rpc" "github.com/cosmos/cosmos-sdk/codec" "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/runtime" + runtimeservices "github.com/cosmos/cosmos-sdk/runtime/services" + "github.com/cosmos/cosmos-sdk/server" "github.com/cosmos/cosmos-sdk/server/api" "github.com/cosmos/cosmos-sdk/server/config" servertypes "github.com/cosmos/cosmos-sdk/server/types" + storetypes "github.com/cosmos/cosmos-sdk/store/types" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/types/module" "github.com/cosmos/cosmos-sdk/version" - authrest "github.com/cosmos/cosmos-sdk/x/auth/client/rest" authtx "github.com/cosmos/cosmos-sdk/x/auth/tx" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + consensusparamkeeper "github.com/cosmos/cosmos-sdk/x/consensus/keeper" "github.com/cosmos/cosmos-sdk/x/params" paramskeeper "github.com/cosmos/cosmos-sdk/x/params/keeper" paramstypes "github.com/cosmos/cosmos-sdk/x/params/types" "github.com/cosmos/cosmos-sdk/x/upgrade" upgradekeeper "github.com/cosmos/cosmos-sdk/x/upgrade/keeper" upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types" - "github.com/tendermint/spm/cosmoscmd" - "github.com/tendermint/spm/openapiconsole" - abci "github.com/tendermint/tendermint/abci/types" - tmjson "github.com/tendermint/tendermint/libs/json" - "github.com/tendermint/tendermint/libs/log" - tmos "github.com/tendermint/tendermint/libs/os" - dbm "github.com/tendermint/tm-db" - "github.com/zigbee-alliance/distributed-compliance-ledger/docs" + + appparams "github.com/zigbee-alliance/distributed-compliance-ledger/app/params" dclpkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" compliancemodule "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance" compliancemodulekeeper "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/keeper" - compliancetypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" + 
dclcompliancetypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" dclauthmodule "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/ante" baseauthmodulekeeper "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/base/keeper" @@ -50,7 +54,7 @@ import ( dclupgrademoduletypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclupgrade/types" modelmodule "github.com/zigbee-alliance/distributed-compliance-ledger/x/model" modelmodulekeeper "github.com/zigbee-alliance/distributed-compliance-ledger/x/model/keeper" - modelmoduletypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/model/types" + dclmodeltypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/model/types" pkimodule "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki" pkimodulekeeper "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/keeper" validatormodule "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator" @@ -127,8 +131,7 @@ var ( ) // module account permissions. 
- maccPerms = map[string][]string{ - /* + /* maccPerms = map[string][]string{ authtypes.FeeCollectorName: nil, distrtypes.ModuleName: nil, minttypes.ModuleName: {authtypes.Minter}, @@ -137,12 +140,12 @@ var ( govtypes.ModuleName: {authtypes.Burner}, ibctransfertypes.ModuleName: {authtypes.Minter, authtypes.Burner}, // this line is used by starport scaffolding # stargate/app/maccPerms - */ } + */ ) var ( - _ cosmoscmd.CosmosApp = (*App)(nil) + _ runtime.AppI = (*App)(nil) _ servertypes.Application = (*App)(nil) ) @@ -164,13 +167,14 @@ type App struct { cdc *codec.LegacyAmino appCodec codec.Codec interfaceRegistry types.InterfaceRegistry + txConfig client.TxConfig invCheckPeriod uint // keys to access the substores - keys map[string]*sdk.KVStoreKey - tkeys map[string]*sdk.TransientStoreKey - memKeys map[string]*sdk.MemoryStoreKey + keys map[string]*storetypes.KVStoreKey + tkeys map[string]*storetypes.TransientStoreKey + memKeys map[string]*storetypes.MemoryStoreKey // keepers /* @@ -184,7 +188,7 @@ type App struct { GovKeeper govkeeper.Keeper CrisisKeeper crisiskeeper.Keeper */ - UpgradeKeeper upgradekeeper.Keeper + UpgradeKeeper *upgradekeeper.Keeper ParamsKeeper paramskeeper.Keeper /* IBCKeeper *ibckeeper.Keeper // IBC Keeper must be a pointer in the app, so we can SetRouter on it correctly @@ -212,10 +216,24 @@ type App struct { ModelKeeper modelmodulekeeper.Keeper ComplianceKeeper compliancemodulekeeper.Keeper + + ConsensusParamsKeeper consensusparamkeeper.Keeper // this line is used by starport scaffolding # stargate/app/keeperDeclaration // the module manager mm *module.Manager + + // sm is the simulation manager + sm *module.SimulationManager + configurator module.Configurator +} + +func (app *App) RegisterNodeService(clientCtx client.Context) { + nodeservice.RegisterNodeService(clientCtx, app.GRPCQueryRouter()) +} + +func (app *App) SimulationManager() *module.SimulationManager { + return app.sm } // New returns a reference to an initialized Gaia. 
@@ -227,18 +245,19 @@ func New( skipUpgradeHeights map[int64]bool, homePath string, invCheckPeriod uint, - encodingConfig cosmoscmd.EncodingConfig, - appOpts servertypes.AppOptions, + encodingConfig appparams.EncodingConfig, baseAppOptions ...func(*baseapp.BaseApp), -) cosmoscmd.App { - appCodec := encodingConfig.Marshaler +) *App { + appCodec := encodingConfig.Codec cdc := encodingConfig.Amino interfaceRegistry := encodingConfig.InterfaceRegistry + txConfig := encodingConfig.TxConfig bApp := baseapp.NewBaseApp(Name, logger, db, encodingConfig.TxConfig.TxDecoder(), baseAppOptions...) bApp.SetCommitMultiStoreTracer(traceStore) bApp.SetVersion(version.Version) bApp.SetInterfaceRegistry(interfaceRegistry) + bApp.SetTxEncoder(txConfig.TxEncoder()) keys := sdk.NewKVStoreKeys( paramstypes.StoreKey, @@ -255,8 +274,8 @@ func New( dclupgrademoduletypes.StoreKey, dclpkitypes.StoreKey, vendorinfomoduletypes.StoreKey, - modelmoduletypes.StoreKey, - compliancetypes.StoreKey, + dclmodeltypes.StoreKey, + dclcompliancetypes.StoreKey, // this line is used by starport scaffolding # stargate/app/storeKey ) tkeys := sdk.NewTransientStoreKeys(paramstypes.TStoreKey) @@ -267,6 +286,7 @@ func New( cdc: cdc, appCodec: appCodec, interfaceRegistry: interfaceRegistry, + txConfig: txConfig, invCheckPeriod: invCheckPeriod, keys: keys, tkeys: tkeys, @@ -276,8 +296,8 @@ func New( app.ParamsKeeper = initParamsKeeper(appCodec, cdc, keys[paramstypes.StoreKey], tkeys[paramstypes.TStoreKey]) // set the BaseApp's parameter store - bApp.SetParamStore(app.ParamsKeeper.Subspace(baseapp.Paramspace).WithKeyTable(paramskeeper.ConsensusParamsKeyTable())) - // add capability keeper and ScopeToModule for ibc module + app.ConsensusParamsKeeper = consensusparamkeeper.NewKeeper(appCodec, keys[upgradetypes.StoreKey], authtypes.NewModuleAddress(authtypes.ModuleName).String()) + bApp.SetParamStore(&app.ConsensusParamsKeeper) // add capability keeper and ScopeToModule for ibc module // app.CapabilityKeeper = 
capabilitykeeper.NewKeeper(appCodec, keys[capabilitytypes.StoreKey], memKeys[capabilitytypes.MemStoreKey]) /* @@ -315,7 +335,7 @@ func New( app.FeeGrantKeeper = feegrantkeeper.NewKeeper(appCodec, keys[feegrant.StoreKey], app.AccountKeeper) */ - app.UpgradeKeeper = upgradekeeper.NewKeeper(skipUpgradeHeights, keys[upgradetypes.StoreKey], appCodec, homePath, app.BaseApp) + app.UpgradeKeeper = upgradekeeper.NewKeeper(skipUpgradeHeights, keys[upgradetypes.StoreKey], appCodec, homePath, app.BaseApp, authtypes.NewModuleAddress(upgradetypes.ModuleName).String()) /* // register the staking hooks // NOTE: stakingKeeper above is passed by reference, so that it will contain these hooks @@ -417,8 +437,8 @@ func New( app.ModelKeeper = *modelmodulekeeper.NewKeeper( appCodec, - keys[modelmoduletypes.StoreKey], - keys[modelmoduletypes.MemStoreKey], + keys[dclmodeltypes.StoreKey], + keys[dclmodeltypes.MemStoreKey], app.DclauthKeeper, app.ComplianceKeeper, @@ -426,8 +446,8 @@ func New( app.ComplianceKeeper = *compliancemodulekeeper.NewKeeper( appCodec, - keys[compliancetypes.StoreKey], - keys[compliancetypes.MemStoreKey], + keys[dclcompliancetypes.StoreKey], + keys[dclcompliancetypes.MemStoreKey], app.DclauthKeeper, app.ModelKeeper, ) @@ -508,12 +528,31 @@ func New( app.mm.SetOrderBeginBlockers( // TODO [issue 99] verify the order - upgradetypes.ModuleName, + dclauthmoduletypes.ModuleName, validatormoduletypes.ModuleName, + dclgenutilmoduletypes.ModuleName, + dclupgrademoduletypes.ModuleName, + upgradetypes.ModuleName, + dclpkitypes.ModuleName, + dclauthmoduletypes.ModuleName, + dclmodeltypes.ModuleName, + dclcompliancetypes.ModuleName, + vendorinfomoduletypes.ModuleName, + paramstypes.ModuleName, ) app.mm.SetOrderEndBlockers( + dclauthmoduletypes.ModuleName, validatormoduletypes.ModuleName, + dclgenutilmoduletypes.ModuleName, + dclupgrademoduletypes.ModuleName, + upgradetypes.ModuleName, + dclpkitypes.ModuleName, + dclauthmoduletypes.ModuleName, + dclmodeltypes.ModuleName, + 
dclcompliancetypes.ModuleName, + vendorinfomoduletypes.ModuleName, + paramstypes.ModuleName, ) // NOTE: The genutils module must occur after staking so that pools are @@ -544,16 +583,32 @@ func New( dclupgrademoduletypes.ModuleName, dclpkitypes.ModuleName, vendorinfomoduletypes.ModuleName, - modelmoduletypes.ModuleName, - compliancetypes.ModuleName, + dclmodeltypes.ModuleName, + dclcompliancetypes.ModuleName, + paramstypes.ModuleName, + upgradetypes.ModuleName, + dclupgrademoduletypes.ModuleName, // this line is used by starport scaffolding # stargate/app/initGenesis ) // app.mm.RegisterInvariants(&app.CrisisKeeper) - app.mm.RegisterRoutes(app.Router(), app.QueryRouter(), encodingConfig.Amino) + // app.mm.RegisterRoutes(app.Router(), app.QueryRouter(), encodingConfig.Amino) + + app.configurator = module.NewConfigurator(app.appCodec, app.MsgServiceRouter(), app.GRPCQueryRouter()) + app.mm.RegisterServices(app.configurator) + + // autocliv1.RegisterQueryServer(app.GRPCQueryRouter(), runtimeservices.NewAutoCLIQueryService(app.mm.Modules)) + reflectionSvc, err := runtimeservices.NewReflectionService() + if err != nil { + panic(err) + } + reflectionv1.RegisterReflectionServiceServer(app.GRPCQueryRouter(), reflectionSvc) - cfg := module.NewConfigurator(app.appCodec, app.MsgServiceRouter(), app.GRPCQueryRouter()) - app.mm.RegisterServices(cfg) + // create the simulation manager and define the order of the modules for deterministic simulations + overrideModules := map[string]module.AppModuleSimulation{ + dclauthmoduletypes.ModuleName: dclauthModule} + app.sm = module.NewSimulationManagerFromAppModules(app.mm.Modules, overrideModules) + app.sm.RegisterStoreDecoders() // initialize stores app.MountKVStores(keys) @@ -627,7 +682,7 @@ func New( app.UpgradeKeeper.SetUpgradeHandler( "v1.4", func(ctx sdk.Context, plan upgradetypes.Plan, fromVM module.VersionMap) (module.VersionMap, error) { - return app.mm.RunMigrations(ctx, cfg, fromVM) + return app.mm.RunMigrations(ctx, 
app.configurator, fromVM) }, ) @@ -650,7 +705,7 @@ func (app *App) EndBlocker(ctx sdk.Context, req abci.RequestEndBlock) abci.Respo // InitChainer application update at chain initialization. func (app *App) InitChainer(ctx sdk.Context, req abci.RequestInitChain) abci.ResponseInitChain { var genesisState GenesisState - if err := tmjson.Unmarshal(req.AppStateBytes, &genesisState); err != nil { + if err := json.Unmarshal(req.AppStateBytes, &genesisState); err != nil { panic(err) } app.UpgradeKeeper.SetModuleVersionMap(ctx, app.mm.GetVersionMap()) @@ -699,21 +754,21 @@ func (app *App) InterfaceRegistry() types.InterfaceRegistry { // GetKey returns the KVStoreKey for the provided store key. // // NOTE: This is solely to be used for testing purposes. -func (app *App) GetKey(storeKey string) *sdk.KVStoreKey { +func (app *App) GetKey(storeKey string) *storetypes.KVStoreKey { return app.keys[storeKey] } // GetTKey returns the TransientStoreKey for the provided store key. // // NOTE: This is solely to be used for testing purposes. -func (app *App) GetTKey(storeKey string) *sdk.TransientStoreKey { +func (app *App) GetTKey(storeKey string) *storetypes.TransientStoreKey { return app.tkeys[storeKey] } // GetMemKey returns the MemStoreKey for the provided mem key. // // NOTE: This is solely used for testing purposes. -func (app *App) GetMemKey(storeKey string) *sdk.MemoryStoreKey { +func (app *App) GetMemKey(storeKey string) *storetypes.MemoryStoreKey { return app.memKeys[storeKey] } @@ -730,10 +785,6 @@ func (app *App) GetSubspace(moduleName string) paramstypes.Subspace { // API server. func (app *App) RegisterAPIRoutes(apiSvr *api.Server, apiConfig config.APIConfig) { clientCtx := apiSvr.ClientCtx - rpc.RegisterRoutes(clientCtx, apiSvr.Router) - - // Register legacy tx routes. - authrest.RegisterTxRoutes(clientCtx, apiSvr.Router) // Register new tx routes from grpc-gateway. 
authtx.RegisterGRPCGatewayRoutes(clientCtx, apiSvr.GRPCGatewayRouter) @@ -741,12 +792,12 @@ func (app *App) RegisterAPIRoutes(apiSvr *api.Server, apiConfig config.APIConfig tmservice.RegisterGRPCGatewayRoutes(clientCtx, apiSvr.GRPCGatewayRouter) // Register legacy and grpc-gateway routes for all modules. - ModuleBasics.RegisterRESTRoutes(clientCtx, apiSvr.Router) ModuleBasics.RegisterGRPCGatewayRoutes(clientCtx, apiSvr.GRPCGatewayRouter) - // register app's OpenAPI routes. - apiSvr.Router.Handle("/static/openapi.yml", http.FileServer(http.FS(docs.Docs))) - apiSvr.Router.HandleFunc("/", openapiconsole.Handler(Name, "/static/openapi.yml")) + // register swagger API from root so that other applications can override easily + if err := server.RegisterSwaggerAPI(apiSvr.ClientCtx, apiSvr.Router, apiConfig.Swagger); err != nil { + panic(err) + } } // RegisterTxService implements the Application.RegisterTxService method. @@ -756,21 +807,31 @@ func (app *App) RegisterTxService(clientCtx client.Context) { // RegisterTendermintService implements the Application.RegisterTendermintService method. func (app *App) RegisterTendermintService(clientCtx client.Context) { - tmservice.RegisterTendermintService(app.BaseApp.GRPCQueryRouter(), clientCtx, app.interfaceRegistry) + tmservice.RegisterTendermintService( + clientCtx, + app.BaseApp.GRPCQueryRouter(), + app.interfaceRegistry, + app.Query, + ) } -// GetMaccPerms returns a copy of the module account permissions. -func GetMaccPerms() map[string][]string { - dupMaccPerms := make(map[string][]string) - for k, v := range maccPerms { - dupMaccPerms[k] = v - } +// TxConfig returns App's TxConfig. 
+func (app *App) TxConfig() client.TxConfig { + return app.txConfig +} + +// Configurator get app configurator +func (app *App) Configurator() module.Configurator { + return app.configurator +} - return dupMaccPerms +// ModuleManager returns the app ModuleManager +func (app *App) ModuleManager() *module.Manager { + return app.mm } // initParamsKeeper init params keeper and its subspaces. -func initParamsKeeper(appCodec codec.BinaryCodec, legacyAmino *codec.LegacyAmino, key, tkey sdk.StoreKey) paramskeeper.Keeper { +func initParamsKeeper(appCodec codec.BinaryCodec, legacyAmino *codec.LegacyAmino, key, tkey storetypes.StoreKey) paramskeeper.Keeper { paramsKeeper := paramskeeper.NewKeeper(appCodec, legacyAmino, key, tkey) paramsKeeper.Subspace(upgradetypes.ModuleName) @@ -792,8 +853,8 @@ func initParamsKeeper(appCodec codec.BinaryCodec, legacyAmino *codec.LegacyAmino paramsKeeper.Subspace(dclupgrademoduletypes.ModuleName) paramsKeeper.Subspace(dclpkitypes.ModuleName) paramsKeeper.Subspace(vendorinfomoduletypes.ModuleName) - paramsKeeper.Subspace(modelmoduletypes.ModuleName) - paramsKeeper.Subspace(compliancetypes.ModuleName) + paramsKeeper.Subspace(dclmodeltypes.ModuleName) + paramsKeeper.Subspace(dclcompliancetypes.ModuleName) // this line is used by starport scaffolding # stargate/app/paramSubspace return paramsKeeper diff --git a/app/encoding.go b/app/encoding.go new file mode 100644 index 000000000..944e1927b --- /dev/null +++ b/app/encoding.go @@ -0,0 +1,36 @@ +package app + +import ( + "github.com/zigbee-alliance/distributed-compliance-ledger/app/params" + + "github.com/cosmos/cosmos-sdk/codec" + "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/std" + "github.com/cosmos/cosmos-sdk/x/auth/tx" +) + +// makeEncodingConfig creates an EncodingConfig for an amino based configuration. 
+func makeEncodingConfig() params.EncodingConfig { + amino := codec.NewLegacyAmino() + interfaceRegistry := types.NewInterfaceRegistry() + marshaler := codec.NewProtoCodec(interfaceRegistry) + txCfg := tx.NewTxConfig(marshaler, tx.DefaultSignModes) + + return params.EncodingConfig{ + InterfaceRegistry: interfaceRegistry, + Codec: marshaler, + TxConfig: txCfg, + Amino: amino, + } +} + +// MakeEncodingConfig creates an EncodingConfig +func MakeEncodingConfig() params.EncodingConfig { + encodingConfig := makeEncodingConfig() + std.RegisterLegacyAminoCodec(encodingConfig.Amino) + std.RegisterInterfaces(encodingConfig.InterfaceRegistry) + ModuleBasics.RegisterLegacyAminoCodec(encodingConfig.Amino) + ModuleBasics.RegisterInterfaces(encodingConfig.InterfaceRegistry) + + return encodingConfig +} diff --git a/app/export.go b/app/export.go index 1b7f9bcb4..eaac059df 100644 --- a/app/export.go +++ b/app/export.go @@ -3,16 +3,14 @@ package app import ( "encoding/json" + tmproto "github.com/cometbft/cometbft/proto/tendermint/types" servertypes "github.com/cosmos/cosmos-sdk/server/types" - tmproto "github.com/tendermint/tendermint/proto/tendermint/types" "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator" ) // ExportAppStateAndValidators exports the state of the application for a genesis // file. 
-func (app *App) ExportAppStateAndValidators( - forZeroHeight bool, jailAllowedAddrs []string, -) (servertypes.ExportedApp, error) { +func (app *App) ExportAppStateAndValidators(_ bool, _ []string, modulesToExport []string) (servertypes.ExportedApp, error) { // as if they could withdraw from the start of the next block ctx := app.NewContext(true, tmproto.Header{Height: app.LastBlockHeight()}) @@ -27,7 +25,7 @@ func (app *App) ExportAppStateAndValidators( } */ - genState := app.mm.ExportGenesis(ctx, app.appCodec) + genState := app.mm.ExportGenesisForModules(ctx, app.appCodec, modulesToExport) appState, err := json.MarshalIndent(genState, "", " ") if err != nil { return servertypes.ExportedApp{}, err diff --git a/app/params/encoding.go b/app/params/encoding.go new file mode 100644 index 000000000..8ff9ea04b --- /dev/null +++ b/app/params/encoding.go @@ -0,0 +1,16 @@ +package params + +import ( + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/codec" + "github.com/cosmos/cosmos-sdk/codec/types" +) + +// EncodingConfig specifies the concrete encoding types to use for a given app. +// This is provided for compatibility between protobuf and amino implementations. 
+type EncodingConfig struct { + InterfaceRegistry types.InterfaceRegistry + Codec codec.Codec + TxConfig client.TxConfig + Amino *codec.LegacyAmino +} diff --git a/bench/Dockerfile b/bench/Dockerfile index 46c9145ff..ea45dbeb0 100644 --- a/bench/Dockerfile +++ b/bench/Dockerfile @@ -20,5 +20,5 @@ WORKDIR /home/locust RUN dcld config node ${DCLD_NODE} RUN dcld config chain-id ${DCLD_CHAIN_ID} RUN dcld config keyring-backend test -RUN dcld config broadcast-mode block +RUN dcld config broadcast-mode sync RUN dcld config output json diff --git a/buf.work.yaml b/buf.work.yaml new file mode 100644 index 000000000..1878b341b --- /dev/null +++ b/buf.work.yaml @@ -0,0 +1,3 @@ +version: v1 +directories: + - proto diff --git a/cmd/dcld/cmd/genaccounts.go b/cmd/dcld/cmd/genaccounts.go index c074c2e36..2f05de12a 100644 --- a/cmd/dcld/cmd/genaccounts.go +++ b/cmd/dcld/cmd/genaccounts.go @@ -6,19 +6,18 @@ import ( "fmt" "strings" + "github.com/spf13/cast" + "github.com/spf13/cobra" + "github.com/spf13/viper" + "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/client/flags" - "github.com/cosmos/cosmos-sdk/codec" - - // "github.com/cosmos/cosmos-sdk/crypto/keyring". "github.com/cosmos/cosmos-sdk/crypto/keyring" cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" "github.com/cosmos/cosmos-sdk/server" sdk "github.com/cosmos/cosmos-sdk/types" authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" - "github.com/spf13/cast" - "github.com/spf13/cobra" - "github.com/spf13/viper" + "github.com/zigbee-alliance/distributed-compliance-ledger/x/common/types" dclauthtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclgenutil" @@ -34,8 +33,6 @@ const ( ) // AddGenesisAccountCmd returns add-genesis-account cobra Command. 
-// -//nolint:staticcheck func AddGenesisAccountCmd(defaultNodeHome string) *cobra.Command { cmd := &cobra.Command{ Use: "add-genesis-account ", @@ -46,10 +43,11 @@ the address will be looked up in the local Keybase. `, Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) - - depCdc := clientCtx.JSONCodec - cdc := depCdc.(codec.Codec) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } + cdc := clientCtx.Codec serverCtx := server.GetServerContextFromCmd(cmd) config := serverCtx.Config @@ -69,7 +67,7 @@ the address will be looked up in the local Keybase. } // attempt to lookup address from Keybase if no address was provided - kb, err := keyring.New(sdk.KeyringServiceName(), keyringBackend, clientCtx.HomeDir, inBuf) + kb, err := keyring.New(sdk.KeyringServiceName(), keyringBackend, clientCtx.HomeDir, inBuf, cdc) if err != nil { return err } @@ -79,7 +77,10 @@ the address will be looked up in the local Keybase. 
return fmt.Errorf("failed to get address from Keybase: %w", err) } - addr = info.GetAddress() + addr, err = info.GetAddress() + if err != nil { + return fmt.Errorf("failed to get address from Keybase: %w", err) + } } pkStr := viper.GetString(FlagPubKey) diff --git a/cmd/dcld/cmd/genaccounts_test.go b/cmd/dcld/cmd/genaccounts_test.go index bc7378107..28e1bb583 100644 --- a/cmd/dcld/cmd/genaccounts_test.go +++ b/cmd/dcld/cmd/genaccounts_test.go @@ -12,8 +12,8 @@ import ( "github.com/cosmos/cosmos-sdk/crypto/hd" "github.com/cosmos/cosmos-sdk/crypto/keyring" "github.com/cosmos/cosmos-sdk/server" - "github.com/cosmos/cosmos-sdk/simapp" - simcmd "github.com/cosmos/cosmos-sdk/simapp/simd/cmd" + "cosmossdk.io/simapp" + simcmd "cosmossdk.io/simapp/simd/cmd" "github.com/cosmos/cosmos-sdk/testutil/testdata" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/types/module" @@ -21,7 +21,7 @@ import ( genutiltest "github.com/cosmos/cosmos-sdk/x/genutil/client/testutil" "github.com/spf13/viper" "github.com/stretchr/testify/require" - "github.com/tendermint/tendermint/libs/log" + "github.com/cometbft/cometbft/libs/log" ) var testMbm = module.NewBasicManager(genutil.AppModuleBasic{}) diff --git a/cmd/dcld/cmd/light.go b/cmd/dcld/cmd/light.go index 00c0b53dc..6418ad315 100644 --- a/cmd/dcld/cmd/light.go +++ b/cmd/dcld/cmd/light.go @@ -13,22 +13,22 @@ import ( "syscall" "time" + dbm "github.com/cometbft/cometbft-db" storetypes "github.com/cosmos/cosmos-sdk/store/types" "github.com/spf13/cobra" - dbm "github.com/tendermint/tm-db" - - tmcfg "github.com/tendermint/tendermint/config" - "github.com/tendermint/tendermint/libs/log" - tmmath "github.com/tendermint/tendermint/libs/math" - "github.com/tendermint/tendermint/light" - lproxy "github.com/tendermint/tendermint/light/proxy" - lrpc "github.com/tendermint/tendermint/light/rpc" - dbs "github.com/tendermint/tendermint/light/store/db" - rpchttp "github.com/tendermint/tendermint/rpc/client/http" - rpcserver 
"github.com/tendermint/tendermint/rpc/jsonrpc/server" + + tmcfg "github.com/cometbft/cometbft/config" + "github.com/cometbft/cometbft/libs/log" + tmmath "github.com/cometbft/cometbft/libs/math" + "github.com/cometbft/cometbft/light" + lproxy "github.com/cometbft/cometbft/light/proxy" + lrpc "github.com/cometbft/cometbft/light/rpc" + dbs "github.com/cometbft/cometbft/light/store/db" + rpchttp "github.com/cometbft/cometbft/rpc/client/http" + rpcserver "github.com/cometbft/cometbft/rpc/jsonrpc/server" ) -// mostly copied from https://github.com/tendermint/tendermint/blob/master/cmd/tendermint/commands/light.go +// mostly copied from https://github.com/cometbft/cometbft/blob/master/cmd/tendermint/commands/light.go const ( FlagListenAddr = "laddr" @@ -238,7 +238,7 @@ func runProxy(cmd *cobra.Command, args []string) error { cfg.MaxOpenConnections = maxOpenConnections // If necessary adjust global WriteTimeout to ensure it's greater than // TimeoutBroadcastTxCommit. - // See https://github.com/tendermint/tendermint/issues/3435 + // See https://github.com/cometbft/cometbft/issues/3435 if cfg.WriteTimeout <= config.RPC.TimeoutBroadcastTxCommit { cfg.WriteTimeout = config.RPC.TimeoutBroadcastTxCommit + 10*time.Second } diff --git a/cmd/dcld/cmd/root.go b/cmd/dcld/cmd/root.go index 6402c2c6a..644cf8ae9 100644 --- a/cmd/dcld/cmd/root.go +++ b/cmd/dcld/cmd/root.go @@ -6,6 +6,14 @@ import ( "os" "path/filepath" + "github.com/spf13/cast" + "github.com/spf13/cobra" + "github.com/spf13/pflag" + + dbm "github.com/cometbft/cometbft-db" + tmcfg "github.com/cometbft/cometbft/config" + "github.com/cometbft/cometbft/libs/log" + tmtypes "github.com/cometbft/cometbft/types" "github.com/cosmos/cosmos-sdk/baseapp" "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/client/config" @@ -17,18 +25,14 @@ import ( serverconfig "github.com/cosmos/cosmos-sdk/server/config" servertypes "github.com/cosmos/cosmos-sdk/server/types" "github.com/cosmos/cosmos-sdk/snapshots" + snapshottypes 
"github.com/cosmos/cosmos-sdk/snapshots/types" "github.com/cosmos/cosmos-sdk/store" sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/cosmos/cosmos-sdk/types/module" authcmd "github.com/cosmos/cosmos-sdk/x/auth/client/cli" "github.com/cosmos/cosmos-sdk/x/crisis" - "github.com/spf13/cast" - "github.com/spf13/cobra" - "github.com/spf13/pflag" - "github.com/tendermint/spm/cosmoscmd" - tmcli "github.com/tendermint/tendermint/libs/cli" - "github.com/tendermint/tendermint/libs/log" - dbm "github.com/tendermint/tm-db" + + "github.com/zigbee-alliance/distributed-compliance-ledger/app" + appparams "github.com/zigbee-alliance/distributed-compliance-ledger/app/params" dclauthtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" dclgenutilcli "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclgenutil/client/cli" ) @@ -36,8 +40,7 @@ import ( type ( // appCreator is an app creator. appCreator struct { - encodingConfig cosmoscmd.EncodingConfig - buildApp cosmoscmd.AppBuilder + encodingConfig appparams.EncodingConfig } ) @@ -51,13 +54,7 @@ type rootOptions struct { // envPrefix string } -func newRootOptions(options ...Option) rootOptions { - opts := rootOptions{} - opts.apply(options...) - - return opts -} - +//nolint:unused func (s *rootOptions) apply(options ...Option) { for _, o := range options { o(s) @@ -88,136 +85,111 @@ func WithEnvPrefix(envPrefix string) Option { */ // NewRootCmd creates a new root command for a Cosmos SDK application. -func NewRootCmd( - appName, - accountAddressPrefix, - defaultNodeHome, - defaultChainID string, - moduleBasics module.BasicManager, - buildApp cosmoscmd.AppBuilder, - options ...Option, -) (*cobra.Command, cosmoscmd.EncodingConfig) { - rootOptions := newRootOptions(options...) 
- - // Set config for prefixes - cosmoscmd.SetPrefixes(accountAddressPrefix) - - encodingConfig := cosmoscmd.MakeEncodingConfig(moduleBasics) +func NewRootCmd() (*cobra.Command, appparams.EncodingConfig) { + encodingConfig := app.MakeEncodingConfig() initClientCtx := client.Context{}. - WithCodec(encodingConfig.Marshaler). + WithCodec(encodingConfig.Codec). WithInterfaceRegistry(encodingConfig.InterfaceRegistry). WithTxConfig(encodingConfig.TxConfig). WithLegacyAmino(encodingConfig.Amino). WithInput(os.Stdin). WithAccountRetriever(dclauthtypes.AccountRetriever{}). - WithBroadcastMode(flags.BroadcastBlock). - WithHomeDir(defaultNodeHome). + WithHomeDir(app.DefaultNodeHome). WithViper("") rootCmd := &cobra.Command{ - Use: appName + "d", + Use: app.Name + "d", Short: "DCLedger App", PersistentPreRunE: func(cmd *cobra.Command, _ []string) error { // set the default command outputs cmd.SetOut(cmd.OutOrStdout()) cmd.SetErr(cmd.ErrOrStderr()) - initClientCtx, err := client.ReadPersistentCommandFlags(initClientCtx, cmd.Flags()) + clientCtx, err := client.ReadPersistentCommandFlags(initClientCtx, cmd.Flags()) if err != nil { return err } - initClientCtx, err = config.ReadFromClientConfig(initClientCtx) + clientCtx, err = config.ReadFromClientConfig(clientCtx) if err != nil { return err } - if err := client.SetCmdClientContextHandler(initClientCtx, cmd); err != nil { + if err := client.SetCmdClientContextHandler(clientCtx, cmd); err != nil { return err } customAppTemplate, customAppConfig := initAppConfig() + customTMConfig := initTendermintConfig() - return server.InterceptConfigsPreRunHandler(cmd, customAppTemplate, customAppConfig) + return server.InterceptConfigsPreRunHandler(cmd, customAppTemplate, customAppConfig, customTMConfig) }, } - initRootCmd( - rootCmd, - encodingConfig, - defaultNodeHome, - moduleBasics, - buildApp, - rootOptions, - ) + initRootCmd(rootCmd, encodingConfig) overwriteFlagDefaults(rootCmd, map[string]string{ - flags.FlagChainID: defaultChainID, + 
flags.FlagChainID: app.Name, flags.FlagKeyringBackend: "test", }) return rootCmd, encodingConfig } +// initTendermintConfig helps to override default Tendermint Config values. +// return tmcfg.DefaultConfig if no custom configuration is required for the application. +func initTendermintConfig() *tmcfg.Config { + cfg := tmcfg.DefaultConfig() + + return cfg +} + func initRootCmd( rootCmd *cobra.Command, - encodingConfig cosmoscmd.EncodingConfig, - defaultNodeHome string, - moduleBasics module.BasicManager, - buildApp cosmoscmd.AppBuilder, - options rootOptions, + encodingConfig appparams.EncodingConfig, ) { + cfg := sdk.GetConfig() + cfg.Seal() + rootCmd.AddCommand( - dclgenutilcli.InitCmd(moduleBasics, defaultNodeHome), - dclgenutilcli.CollectGenTxsCmd(dclauthtypes.GenesisAccountsIterator{}, defaultNodeHome), + dclgenutilcli.InitCmd(app.ModuleBasics, app.DefaultNodeHome), + dclgenutilcli.CollectGenTxsCmd(dclauthtypes.GenesisAccountsIterator{}, app.DefaultNodeHome), // dclgenutilcli.MigrateGenesisCmd(), TODO issue 99: review, do we need that dclgenutilcli.GenTxCmd( - moduleBasics, + app.ModuleBasics, encodingConfig.TxConfig, dclauthtypes.GenesisAccountsIterator{}, - defaultNodeHome, + app.DefaultNodeHome, ), - dclgenutilcli.ValidateGenesisCmd(moduleBasics), - AddGenesisAccountCmd(defaultNodeHome), - tmcli.NewCompletionCmd(rootCmd, true), + dclgenutilcli.ValidateGenesisCmd(app.ModuleBasics), + AddGenesisAccountCmd(app.DefaultNodeHome), debug.Cmd(), config.Cmd(), ) a := appCreator{ encodingConfig, - buildApp, } // add server commands server.AddCommands( rootCmd, - defaultNodeHome, + app.DefaultNodeHome, a.newApp, a.appExport, - func(cmd *cobra.Command) { - addModuleInitFlags(cmd) - - if options.startCmdCustomizer != nil { - options.startCmdCustomizer(cmd) - } - }, + addModuleInitFlags, ) // add keybase, auxiliary RPC, query, and tx child commands rootCmd.AddCommand( rpc.StatusCommand(), - queryCommand(moduleBasics), - txCommand(moduleBasics), - 
keys.Commands(defaultNodeHome), + queryCommand(), + txCommand(), + keys.Commands(app.DefaultNodeHome), ) - // add user given sub commands. - for _, cmd := range options.addSubCmds { - rootCmd.AddCommand(cmd) - } rootCmd.AddCommand(LightCmd) } // queryCommand returns the sub-command to send queries to the app. -func queryCommand(moduleBasics module.BasicManager) *cobra.Command { +func queryCommand() *cobra.Command { cmd := &cobra.Command{ Use: "query", Aliases: []string{"q"}, @@ -235,14 +207,14 @@ func queryCommand(moduleBasics module.BasicManager) *cobra.Command { authcmd.QueryTxCmd(), ) - moduleBasics.AddQueryCommands(cmd) + app.ModuleBasics.AddQueryCommands(cmd) cmd.PersistentFlags().String(flags.FlagChainID, "", "The network chain ID") return cmd } // txCommand returns the sub-command to send transactions to the app. -func txCommand(moduleBasics module.BasicManager) *cobra.Command { +func txCommand() *cobra.Command { cmd := &cobra.Command{ Use: "tx", Short: "Transactions subcommands", @@ -262,7 +234,7 @@ func txCommand(moduleBasics module.BasicManager) *cobra.Command { authcmd.GetDecodeCommand(), ) - moduleBasics.AddTxCommands(cmd) + app.ModuleBasics.AddTxCommands(cmd) cmd.PersistentFlags().String(flags.FlagChainID, "", "The network chain ID") return cmd @@ -312,7 +284,7 @@ func (a appCreator) newApp( } snapshotDir := filepath.Join(cast.ToString(appOpts.Get(flags.FlagHome)), "data", "snapshots") - snapshotDB, err := sdk.NewLevelDB("metadata", snapshotDir) + snapshotDB, err := dbm.NewDB("metadata", dbm.GoLevelDBBackend, snapshotDir) if err != nil { panic(err) } @@ -321,7 +293,24 @@ func (a appCreator) newApp( panic(err) } - return a.buildApp( + snapshotOptions := snapshottypes.NewSnapshotOptions( + cast.ToUint64(appOpts.Get(server.FlagStateSyncSnapshotInterval)), + cast.ToUint32(appOpts.Get(server.FlagStateSyncSnapshotKeepRecent)), + ) + + homeDir := cast.ToString(appOpts.Get(flags.FlagHome)) + appChainID := cast.ToString(appOpts.Get(flags.FlagChainID)) + if 
appChainID == "" { + // fallback to genesis chain-id + appGenesis, err := tmtypes.GenesisDocFromFile(filepath.Join(homeDir, "config", "genesis.json")) + if err != nil { + panic(err) + } + + appChainID = appGenesis.ChainID + } + + return app.New( logger, db, traceStore, @@ -330,18 +319,17 @@ func (a appCreator) newApp( cast.ToString(appOpts.Get(flags.FlagHome)), cast.ToUint(appOpts.Get(server.FlagInvCheckPeriod)), a.encodingConfig, - appOpts, baseapp.SetPruning(pruningOpts), baseapp.SetMinGasPrices(cast.ToString(appOpts.Get(server.FlagMinGasPrices))), - baseapp.SetMinRetainBlocks(cast.ToUint64(appOpts.Get(server.FlagMinRetainBlocks))), baseapp.SetHaltHeight(cast.ToUint64(appOpts.Get(server.FlagHaltHeight))), baseapp.SetHaltTime(cast.ToUint64(appOpts.Get(server.FlagHaltTime))), + baseapp.SetMinRetainBlocks(cast.ToUint64(appOpts.Get(server.FlagMinRetainBlocks))), baseapp.SetInterBlockCache(cache), baseapp.SetTrace(cast.ToBool(appOpts.Get(server.FlagTrace))), baseapp.SetIndexEvents(cast.ToStringSlice(appOpts.Get(server.FlagIndexEvents))), - baseapp.SetSnapshotStore(snapshotStore), - baseapp.SetSnapshotInterval(cast.ToUint64(appOpts.Get(server.FlagStateSyncSnapshotInterval))), - baseapp.SetSnapshotKeepRecent(cast.ToUint32(appOpts.Get(server.FlagStateSyncSnapshotKeepRecent))), + baseapp.SetSnapshot(snapshotStore, snapshotOptions), + baseapp.SetIAVLCacheSize(cast.ToInt(appOpts.Get(server.FlagIAVLCacheSize))), + baseapp.SetChainID(appChainID), ) } @@ -354,15 +342,14 @@ func (a appCreator) appExport( forZeroHeight bool, jailAllowedAddrs []string, appOpts servertypes.AppOptions, + modulesToExport []string, ) (servertypes.ExportedApp, error) { - var exportableApp cosmoscmd.ExportableApp - homePath, ok := appOpts.Get(flags.FlagHome).(string) if !ok || homePath == "" { return servertypes.ExportedApp{}, errors.New("application home not set") } - exportableApp = a.buildApp( + app := app.New( logger, db, traceStore, @@ -371,16 +358,15 @@ func (a appCreator) appExport( homePath, 
uint(1), a.encodingConfig, - appOpts, ) if height != -1 { - if err := exportableApp.LoadHeight(height); err != nil { + if err := app.LoadHeight(height); err != nil { return servertypes.ExportedApp{}, err } } - return exportableApp.ExportAppStateAndValidators(forZeroHeight, jailAllowedAddrs) + return app.ExportAppStateAndValidators(forZeroHeight, jailAllowedAddrs, modulesToExport) } // initAppConfig helps to override default appConfig template and configs. diff --git a/cmd/dcld/main.go b/cmd/dcld/main.go index d26375677..b05728e2e 100644 --- a/cmd/dcld/main.go +++ b/cmd/dcld/main.go @@ -1,8 +1,10 @@ package main import ( + "errors" "os" + "github.com/cosmos/cosmos-sdk/server" svrcmd "github.com/cosmos/cosmos-sdk/server/cmd" "github.com/zigbee-alliance/distributed-compliance-ledger/app" @@ -10,16 +12,14 @@ import ( ) func main() { - rootCmd, _ := cmd.NewRootCmd( - app.Name, - app.AccountAddressPrefix, - app.DefaultNodeHome, - app.Name, - app.ModuleBasics, - app.New, - // this line is used by starport scaffolding # root/arguments - ) - if err := svrcmd.Execute(rootCmd, app.DefaultNodeHome); err != nil { - os.Exit(1) + rootCmd, _ := cmd.NewRootCmd() + if err := svrcmd.Execute(rootCmd, "", app.DefaultNodeHome); err != nil { + var e server.ErrorCode + switch { + case errors.As(err, &e): + os.Exit(e.Code) + default: + os.Exit(1) + } } } diff --git a/cmd/settings/settings.go b/cmd/settings/settings.go deleted file mode 100644 index 1f6abaa19..000000000 --- a/cmd/settings/settings.go +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright 2020 DSR Corporation -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package settings - -import ( - "github.com/cosmos/cosmos-sdk/client/flags" - "github.com/cosmos/cosmos-sdk/store/types" -) - -const ( - // Default broadcast mode used for write transactions. - DefaultBroadcastMode = flags.BroadcastBlock -) - -// PruningStrategy of the application: Store every 100th state. Keep last 100 states. Do pruning at every 10th height. -var PruningStrategy = types.NewPruningOptions(100, 100, 10) diff --git a/config.yml b/config.yml index 387c2d031..b8d06850a 100644 --- a/config.yml +++ b/config.yml @@ -1,33 +1,33 @@ ---- -# Copyright 2022 -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- +version: 1 +build: + main: app + proto: + path: proto + third_party_paths: + - third_party/proto + - proto_vendor accounts: - - name: alice - coins: ["20000token", "200000000stake"] - - name: bob - coins: ["10000token", "100000000stake"] -validator: - name: alice - staked: "100000000stake" -client: - # openapi: - # path: "docs/static/openapi.yml" - vuex: - path: "vue/src/store" +- name: alice + coins: + - 20000token + - 200000000stake +- name: bob + coins: + - 10000token + - 100000000stake faucet: name: bob - coins: ["5token", "100000stake"] -build: - main: "app" + coins: + - 5token + - 100000stake + host: 0.0.0.0:4500 +client: + typescript: + path: ts-client + vuex: + path: vue/src/store + composables: + path: vue/src/composables +validators: +- name: alice + bonded: 100000000stake diff --git a/deployment/ansible/roles/bootstrap/defaults/main.yml b/deployment/ansible/roles/bootstrap/defaults/main.yml index f015bffe4..772fd3eb1 100644 --- a/deployment/ansible/roles/bootstrap/defaults/main.yml +++ b/deployment/ansible/roles/bootstrap/defaults/main.yml @@ -12,5 +12,3 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- - diff --git a/deployment/ansible/roles/bootstrap/molecule/default/tests/test_default.py b/deployment/ansible/roles/bootstrap/molecule/default/tests/test_default.py index 3c55d96db..12afa79fa 100644 --- a/deployment/ansible/roles/bootstrap/molecule/default/tests/test_default.py +++ b/deployment/ansible/roles/bootstrap/molecule/default/tests/test_default.py @@ -33,6 +33,6 @@ def test_service(host): "User=cosmovisor", "Group=dcl", 'Environment="DAEMON_HOME=/var/lib/dcl/.dcl" "DAEMON_NAME=dcld"', - "ExecStart=/usr/bin/cosmovisor start", + "ExecStart=/usr/bin/cosmovisor run start", ]: assert prop in svc.content_string diff --git a/deployment/ansible/roles/configure/defaults/main.yml b/deployment/ansible/roles/configure/defaults/main.yml index 56d2d7cde..9e6025d15 100644 --- a/deployment/ansible/roles/configure/defaults/main.yml +++ b/deployment/ansible/roles/configure/defaults/main.yml @@ -28,6 +28,6 @@ client: # : to Tendermint RPC interface for this chain node: "tcp://localhost:26657" # Transaction broadcasting mode (sync|async|block) - broadcast-mode: block + broadcast-mode: sync config: {} app: {} diff --git a/deployment/cosmovisor.service b/deployment/cosmovisor.service index bd21fd8f0..02e7c5acc 100644 --- a/deployment/cosmovisor.service +++ b/deployment/cosmovisor.service @@ -11,7 +11,7 @@ User=ubuntu Environment="DAEMON_HOME=/var/lib/ubuntu/.dcl" Environment="DAEMON_NAME=dcld" Environment="DAEMON_ALLOW_DOWNLOAD_BINARIES=true" -ExecStart=/usr/bin/cosmovisor start +ExecStart=/usr/bin/cosmovisor run start [Install] WantedBy=multi-user.target diff --git a/deployment/scripts/run_dcl_node b/deployment/scripts/run_dcl_node index a038ce462..ec147aa11 100755 --- a/deployment/scripts/run_dcl_node +++ b/deployment/scripts/run_dcl_node @@ -230,7 +230,7 @@ function config_cli { ./dcld config chain-id "$_chain_id" ./dcld config output json - ./dcld config broadcast-mode block + ./dcld config broadcast-mode sync if [[ -n "$KEYRING_BACKEND" ]]; then ./dcld config 
keyring-backend "$KEYRING_BACKEND" fi @@ -346,14 +346,14 @@ function run_node { echo -e "\tUse 'systemctl status cosmovisor' to get the node service status." echo "Use 'journalctl -u cosmovisor.service -f' to see node logs." else - cosmovisor start >./cosmovisor.log 2>&1 & + cosmovisor run start >./cosmovisor.log 2>&1 & echo "Node has been stared in the backgroud." fi } function wait_node_up { - local _timeout="${1:-5}" + local _timeout="${1:-60}" local _try=1 echo -e "Waiting the node becomes up" diff --git a/docker-compose.yml b/docker-compose.yml index e1775c8f4..39e24b498 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -30,7 +30,7 @@ services: networks: localnet: ipv4_address: 192.167.10.2 - command: cosmovisor start + command: cosmovisor run start node1: image: dcledger @@ -46,7 +46,7 @@ services: networks: localnet: ipv4_address: 192.167.10.3 - command: cosmovisor start + command: cosmovisor run start node2: image: dcledger @@ -62,7 +62,7 @@ services: networks: localnet: ipv4_address: 192.167.10.4 - command: cosmovisor start + command: cosmovisor run start node3: image: dcledger @@ -78,7 +78,7 @@ services: networks: localnet: ipv4_address: 192.167.10.5 - command: cosmovisor start + command: cosmovisor run start observer0: image: dcledger @@ -96,7 +96,7 @@ services: networks: localnet: ipv4_address: 192.167.10.6 - command: cosmovisor start + command: cosmovisor run start lightclient0: image: dcledger @@ -113,7 +113,7 @@ services: networks: localnet: ipv4_address: 192.167.10.7 - command: cosmovisor light dclchain -p tcp://192.167.10.2:26657 -w tcp://192.167.10.3:26657,tcp://192.167.10.4:26657 --start-timeout=10 --log-level=debug + command: cosmovisor run light dclchain -p tcp://192.167.10.2:26657 -w tcp://192.167.10.3:26657,tcp://192.167.10.4:26657 --start-timeout=10 --log-level=debug networks: diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md new file mode 100644 index 000000000..e64d38b9e --- /dev/null +++ b/docs/CHANGELOG.md @@ -0,0 +1,15 @@ +## 
[v1.4.0-dev3](https://github.com/zigbee-alliance/distributed-compliance-ledger/releases/tag/v1.4.0-dev3) - 2024-04-24 + +### Dependency Changes +* Upgrade cosmos-sdk to v0.47.8 + * Migrate to CometBFT. Follow the migration instructions in the [upgrade guide](https://github.com/cosmos/cosmos-sdk/blob/main/UPGRADING.md#migration-to-cometbft-part-1). + * Upgrade all related dependencies too +* Upgrade Golang version to 1.20 + +### Breaking Changes +* Transaction broadcasting `block` mode has been removed from the updated cosmos-sdk. +Starting from this version, dcl has only two modes: `sync` and `async`, with the default being `sync`. +In this mode, to obtain the actual result of a transaction (`txn`), an additional `query` call with the `txHash` must be executed. For example: + `dcld query tx txHash` - where `txHash` represents the hash of the previously executed transaction. +* `starport` cli tool is no longer supported. Please use `v0.27.2` version of [ignite](https://github.com/ignite/cli). +* Due to upgrading `cosmovisor` to v1.3.0 in Docker and shell files, the node starting command has changed from `cosmovisor start` to `cosmovisor run start` diff --git a/docs/tendermint-rpc.md b/docs/cometbft-rpc.md similarity index 99% rename from docs/tendermint-rpc.md rename to docs/cometbft-rpc.md index 28c380da7..17e482ad8 100644 --- a/docs/tendermint-rpc.md +++ b/docs/cometbft-rpc.md @@ -5,7 +5,7 @@ The default RPC listen address is tcp://0.0.0.0:26657. See the [docs](https://do ## WebSocket Events -Full list of Tendermint WebSocket events can be found [here](https://pkg.go.dev/github.com/tendermint/tendermint/types#pkg-constants). +Full list of CometBFT WebSocket events can be found [here](https://pkg.go.dev/github.com/cometbft/cometbft/types#pkg-constants). 
### Subscribe diff --git a/docs/how-to.md b/docs/how-to.md index dc32f0e85..f3c0541f0 100644 --- a/docs/how-to.md +++ b/docs/how-to.md @@ -47,7 +47,8 @@ Here is the list of supported settings: - `chain-id ` - unique chain ID of the network you are going to connect to - `output ` - Output format (text/json) - `node ` - Address `:` of the node to connect. -- `broadcast-mode ` - Write transaction broadcast mode to use (one of: `sync`, `async`, `block`. `block` is default). +- `broadcast-mode ` - Write transaction broadcast mode to use (one of: `sync`, `async`. `sync` is default). + - Note: To get the actual result of a transaction (`dcld tx ..`), one more query call with the `txHash` must be executed (`dcld query tx txHash`) In order to connect the CLI to a DC Ledger Network (Chain), the following parameters should be used: @@ -336,5 +337,5 @@ Node will be jailed and removed from the active validator set in the following c Note that jailed node will not be removed from the main index to track validator nodes. So it is not possible to create a new node with the same `validator address`. -In order to unjail the node and return it to the active tendermint validator set the sufficient number of Trustee's approvals is needed +In order to unjail the node and return it to the active cometbft validator set the sufficient number of Trustee's approvals is needed (see authorization rules). diff --git a/docs/running-light-client-proxy.md b/docs/running-light-client-proxy.md index c44e803c0..d45d2f5c3 100644 --- a/docs/running-light-client-proxy.md +++ b/docs/running-light-client-proxy.md @@ -2,14 +2,14 @@ Light Client Proxy can be used if there are no trusted Full Nodes (Validator or Observers) a client can connect to. -It can be a proxy for CLI or direct requests from code done via Tendermint RPC. +It can be a proxy for CLI or direct requests from code done via CometBFT RPC. Please note, that CLI can use a Light Client proxy only for single-value query requests. 
A Full Node (Validator or Observer) should be used for multi-value query requests and write requests. See the following links for details about a Light Client: -- +- ## Running Light Client Proxy - Short @@ -21,7 +21,7 @@ dcld config node tcp://:8888 ## Running Light Client Proxy - Detailed -See for details +See for details ### 1. Choose Semi-trusted or Non-trusted Nodes for Connection @@ -36,7 +36,7 @@ dcld light -p tcp://:26657 -w tcp://:266 Light Client Proxy is started at port `8888` by default. It can be changed with `--laddr` argument. -Light Client Proxy will write some information (about headers) to disk. Default location is `~/.tendermint-light`. +Light Client Proxy will write some information (about headers) to disk. Default location is `~/.cometbft-light`. It can be changed with `--dir` argument. Light Client needs initial height and hash it trusts. If no `--height` and `--hash` arguments provided, diff --git a/docs/running-node.md b/docs/running-node.md index 5623d9a31..be72c9c79 100644 --- a/docs/running-node.md +++ b/docs/running-node.md @@ -7,7 +7,7 @@ - `Private Sentry` - Full Node to connect other (external) Validator Nodes ([Sentry Node Architecture](https://forum.cosmos.network/t/sentry-node-architecture-overview/454)) - `Public Sentry` - Full Node to connect other (external) Full Nodes - `Observer` - Full Node for serving gRPC / REST / RPC clients -- `Seed` - Full Node for sharing IP addresses of `Public Sentry` Nodes ([Seed Node](https://docs.tendermint.com/)) +- `Seed` - Full Node for sharing IP addresses of `Public Sentry` Nodes ([Seed Node](https://docs.cometbft.com/)) ## DCL network architecture overview diff --git a/docs/static/cosmos-base-openapi.json b/docs/static/cosmos-base-openapi.json index b34f4741d..439a39e23 100644 --- a/docs/static/cosmos-base-openapi.json +++ b/docs/static/cosmos-base-openapi.json @@ -1,11 +1,151 @@ { "swagger": "2.0", "info": { - "title": "cosmos/base/tendermint/v1beta1/query.proto", - "version": "v0.44.4", - 
"description": "A REST interface for base service endpoints" + "title": "Cosmos SDK - gRPC Gateway docs", + "description": "A REST interface for state queries", + "version": "1.0.0" }, "paths": { + "/cosmos/base/tendermint/v1beta1/abci_query": { + "get": { + "summary": "ABCIQuery defines a query handler that supports ABCI queries directly to the\napplication, bypassing Tendermint completely. The ABCI query must contain\na valid and supported path, including app, custom, p2p, and store.", + "description": "Since: cosmos-sdk 0.46", + "operationId": "ABCIQuery", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "type": "object", + "properties": { + "code": { + "type": "integer", + "format": "int64" + }, + "log": { + "type": "string" + }, + "info": { + "type": "string" + }, + "index": { + "type": "string", + "format": "int64" + }, + "key": { + "type": "string", + "format": "byte" + }, + "value": { + "type": "string", + "format": "byte" + }, + "proof_ops": { + "type": "object", + "properties": { + "ops": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "key": { + "type": "string", + "format": "byte" + }, + "data": { + "type": "string", + "format": "byte" + } + }, + "description": "ProofOp defines an operation used for calculating Merkle root. The data could\nbe arbitrary format, providing necessary data for example neighbouring node\nhash.\n\nNote: This type is a duplicate of the ProofOp proto type defined in Tendermint." + } + } + }, + "description": "ProofOps is Merkle proof defined by the list of ProofOps.\n\nNote: This type is a duplicate of the ProofOps proto type defined in Tendermint." 
+ }, + "height": { + "type": "string", + "format": "int64" + }, + "codespace": { + "type": "string" + } + }, + "description": "ABCIQueryResponse defines the response structure for the ABCIQuery gRPC query.\n\nNote: This type is a duplicate of the ResponseQuery proto type defined in\nTendermint." + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "details": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. 
(Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." + } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + } + } + } + } + } + }, + "parameters": [ + { + "name": "data", + "in": "query", + "required": false, + "type": "string", + "format": "byte" + }, + { + "name": "path", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "height", + "in": "query", + "required": false, + "type": "string", + "format": "int64" + }, + { + "name": "prove", + "in": "query", + "required": false, + "type": "boolean" + } + ], + "tags": [ + "Service" + ] + } + }, "/cosmos/base/tendermint/v1beta1/blocks/latest": { "get": { "summary": "GetLatestBlock returns the latest block.", @@ -41,6 +181,7 @@ "title": "BlockID" }, "block": { + "title": "Deprecated: please use `sdk_block` instead", "type": "object", "properties": { "header": { @@ -136,7 +277,7 @@ "format": "byte" } }, - "description": "Header defines the structure of a Tendermint block header." + "description": "Header defines the structure of a block header." }, "data": { "type": "object", @@ -410,7 +551,7 @@ "format": "byte" } }, - "description": "Header defines the structure of a Tendermint block header." + "description": "Header defines the structure of a block header." 
}, "commit": { "type": "object", @@ -508,7 +649,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -540,7 +681,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -585,7 +726,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -685,88 +826,9 @@ "description": "Commit contains the evidence that a block was committed by a set of validators." } } - } - }, - "description": "GetLatestBlockResponse is the response type for the Query/GetLatestBlock RPC method." - } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "type": "object", - "properties": { - "error": { - "type": "string" - }, - "code": { - "type": "integer", - "format": "int32" - }, - "message": { - "type": "string" - }, - "details": { - "type": "array", - "items": { - "type": "object", - "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. 
However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." 
- } - }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - } - } - } - } - } - }, - "tags": [ - "Service" - ] - } - }, - "/cosmos/base/tendermint/v1beta1/blocks/{height}": { - "get": { - "summary": "GetBlockByHeight queries block for given height.", - "operationId": "GetBlockByHeight", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "type": "object", - "properties": { - "block_id": { - "type": "object", - "properties": { - "hash": { - "type": "string", - "format": "byte" - }, - "part_set_header": { - "type": "object", - "properties": { - "total": { - "type": "integer", - "format": "int64" - }, - "hash": { - "type": "string", - "format": "byte" - } - }, - "title": "PartsetHeader" - } - }, - "title": "BlockID" }, - "block": { + "sdk_block": { + "title": "Since: cosmos-sdk 0.47", "type": "object", "properties": { "header": { @@ -859,7 +921,7 @@ }, "proposer_address": { "type": "string", - "format": "byte" + "description": "proposer_address is the original block proposer address, formatted as a Bech32 string.\nIn Tendermint, this type is `bytes`, but in the SDK, we convert it to a Bech32 string\nfor better UX." } }, "description": "Header defines the structure of a Tendermint block header." @@ -1136,7 +1198,7 @@ "format": "byte" } }, - "description": "Header defines the structure of a Tendermint block header." + "description": "Header defines the structure of a block header." 
}, "commit": { "type": "object", @@ -1234,7 +1296,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -1266,7 +1328,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -1311,7 +1373,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -1410,10 +1472,11 @@ }, "description": "Commit contains the evidence that a block was committed by a set of validators." } - } + }, + "description": "Block is tendermint type Block, with the Header proposer address\nfield converted to bech32 string." } }, - "description": "GetBlockByHeightResponse is the response type for the Query/GetBlockByHeight RPC method." + "description": "GetLatestBlockResponse is the response type for the Query/GetLatestBlock RPC method." } }, "default": { @@ -1446,601 +1509,4009 @@ "description": "Must be a valid serialized protocol buffer of the above specified type." 
} }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" } } } } } }, - "parameters": [ - { - "name": "height", - "in": "path", - "required": true, - "type": "string", - "format": "int64" - } - ], "tags": [ "Service" ] } }, - "/cosmos/base/tendermint/v1beta1/node_info": { + "/cosmos/base/tendermint/v1beta1/blocks/{height}": { "get": { - "summary": "GetNodeInfo queries the current node info.", - "operationId": "GetNodeInfo", + "summary": "GetBlockByHeight queries block for given height.", + "operationId": "GetBlockByHeight", "responses": { "200": { "description": "A successful response.", "schema": { "type": "object", "properties": { - "default_node_info": { + "block_id": { "type": "object", "properties": { - "protocol_version": { - "type": "object", - "properties": { - "p2p": { - "type": "string", - "format": "uint64" - }, - "block": { - "type": "string", - "format": "uint64" - }, - "app": { - "type": "string", - "format": "uint64" - } - } - }, - "default_node_id": { - "type": "string" - }, - "listen_addr": { - "type": "string" - }, - "network": { - "type": "string" - }, - "version": { - "type": "string" - }, - "channels": { + "hash": { "type": "string", "format": "byte" }, - "moniker": { - "type": "string" - }, - "other": { + "part_set_header": { "type": "object", "properties": { - "tx_index": { - "type": "string" + "total": { + "type": "integer", + "format": "int64" }, - "rpc_address": { - "type": "string" + "hash": { + "type": "string", + "format": "byte" 
} - } + }, + "title": "PartsetHeader" } - } + }, + "title": "BlockID" }, - "application_version": { + "block": { + "title": "Deprecated: please use `sdk_block` instead", "type": "object", "properties": { - "name": { - "type": "string" - }, - "app_name": { - "type": "string" - }, - "version": { - "type": "string" - }, - "git_commit": { - "type": "string" - }, - "build_tags": { - "type": "string" - }, - "go_version": { - "type": "string" - }, - "build_deps": { - "type": "array", - "items": { - "type": "object", - "properties": { - "path": { - "type": "string", - "title": "module path" + "header": { + "type": "object", + "properties": { + "version": { + "title": "basic block info", + "type": "object", + "properties": { + "block": { + "type": "string", + "format": "uint64" + }, + "app": { + "type": "string", + "format": "uint64" + } }, - "version": { - "type": "string", - "title": "module version" + "description": "Consensus captures the consensus rules for processing a block in the blockchain,\nincluding all blockchain data structures and the rules of the application's\nstate transition machine." + }, + "chain_id": { + "type": "string" + }, + "height": { + "type": "string", + "format": "int64" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "last_block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } }, - "sum": { - "type": "string", - "title": "checksum" - } + "title": "BlockID" }, - "title": "Module is the type for VersionInfo" - } - }, - "cosmos_sdk_version": { - "type": "string", - "title": "Since: cosmos-sdk 0.43" - } - }, - "description": "VersionInfo is the type for the GetNodeInfoResponse message." 
- } - }, - "description": "GetNodeInfoResponse is the request type for the Query/GetNodeInfo RPC method." - } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "type": "object", - "properties": { - "error": { - "type": "string" - }, - "code": { - "type": "integer", - "format": "int32" - }, - "message": { - "type": "string" - }, - "details": { - "type": "array", - "items": { - "type": "object", - "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
+ "last_commit_hash": { + "type": "string", + "format": "byte", + "title": "hashes of block data" + }, + "data_hash": { + "type": "string", + "format": "byte" + }, + "validators_hash": { + "type": "string", + "format": "byte", + "title": "hashes from the app output from the prev block" + }, + "next_validators_hash": { + "type": "string", + "format": "byte" + }, + "consensus_hash": { + "type": "string", + "format": "byte" + }, + "app_hash": { + "type": "string", + "format": "byte" + }, + "last_results_hash": { + "type": "string", + "format": "byte" + }, + "evidence_hash": { + "type": "string", + "format": "byte", + "title": "consensus info" + }, + "proposer_address": { + "type": "string", + "format": "byte" + } }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." - } + "description": "Header defines the structure of a block header." }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default 
use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - } - } - } - } - } - }, - "tags": [ - "Service" - ] - } - }, - "/cosmos/base/tendermint/v1beta1/syncing": { - "get": { - "summary": "GetSyncing queries node syncing.", - "operationId": "GetSyncing", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "type": "object", - "properties": { - "syncing": { - "type": "boolean" - } - }, - "description": "GetSyncingResponse is the response type for the Query/GetSyncing RPC method." - } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "type": "object", - "properties": { - "error": { - "type": "string" - }, - "code": { - "type": "integer", - "format": "int32" - }, - "message": { - "type": "string" - }, - "details": { - "type": "array", - "items": { - "type": "object", - "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. 
This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + "data": { + "type": "object", + "properties": { + "txs": { + "type": "array", + "items": { + "type": "string", + "format": "byte" + }, + "description": "Txs that will be applied by state @ block.Height+1.\nNOTE: not all txs here are valid. We're just agreeing on the order first.\nThis means that block.AppHash does not include these txs." + } }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." 
- } + "title": "Data contains the set of transactions included in the block" }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - } - } - } - } - } - }, - "tags": [ + "evidence": { + "type": "object", + "properties": { + "evidence": { + "type": "array", + "items": { + "type": "object", + "properties": { + "duplicate_vote_evidence": { + "type": "object", + "properties": { + "vote_a": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "SIGNED_MSG_TYPE_UNKNOWN", + "SIGNED_MSG_TYPE_PREVOTE", + "SIGNED_MSG_TYPE_PRECOMMIT", + "SIGNED_MSG_TYPE_PROPOSAL" + ], + "default": "SIGNED_MSG_TYPE_UNKNOWN", + "description": "SignedMsgType is a type of signed message in the consensus.\n\n - SIGNED_MSG_TYPE_PREVOTE: Votes\n - SIGNED_MSG_TYPE_PROPOSAL: Proposals" + }, + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "validator_index": { + "type": "integer", + "format": "int32" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "Vote represents a prevote, precommit, or commit vote from validators for\nconsensus." 
+ }, + "vote_b": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "SIGNED_MSG_TYPE_UNKNOWN", + "SIGNED_MSG_TYPE_PREVOTE", + "SIGNED_MSG_TYPE_PRECOMMIT", + "SIGNED_MSG_TYPE_PROPOSAL" + ], + "default": "SIGNED_MSG_TYPE_UNKNOWN", + "description": "SignedMsgType is a type of signed message in the consensus.\n\n - SIGNED_MSG_TYPE_PREVOTE: Votes\n - SIGNED_MSG_TYPE_PROPOSAL: Proposals" + }, + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "validator_index": { + "type": "integer", + "format": "int32" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "Vote represents a prevote, precommit, or commit vote from validators for\nconsensus." + }, + "total_voting_power": { + "type": "string", + "format": "int64" + }, + "validator_power": { + "type": "string", + "format": "int64" + }, + "timestamp": { + "type": "string", + "format": "date-time" + } + }, + "description": "DuplicateVoteEvidence contains evidence of a validator signed two conflicting votes." 
+ }, + "light_client_attack_evidence": { + "type": "object", + "properties": { + "conflicting_block": { + "type": "object", + "properties": { + "signed_header": { + "type": "object", + "properties": { + "header": { + "type": "object", + "properties": { + "version": { + "title": "basic block info", + "type": "object", + "properties": { + "block": { + "type": "string", + "format": "uint64" + }, + "app": { + "type": "string", + "format": "uint64" + } + }, + "description": "Consensus captures the consensus rules for processing a block in the blockchain,\nincluding all blockchain data structures and the rules of the application's\nstate transition machine." + }, + "chain_id": { + "type": "string" + }, + "height": { + "type": "string", + "format": "int64" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "last_block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "last_commit_hash": { + "type": "string", + "format": "byte", + "title": "hashes of block data" + }, + "data_hash": { + "type": "string", + "format": "byte" + }, + "validators_hash": { + "type": "string", + "format": "byte", + "title": "hashes from the app output from the prev block" + }, + "next_validators_hash": { + "type": "string", + "format": "byte" + }, + "consensus_hash": { + "type": "string", + "format": "byte" + }, + "app_hash": { + "type": "string", + "format": "byte" + }, + "last_results_hash": { + "type": "string", + "format": "byte" + }, + "evidence_hash": { + "type": "string", + "format": "byte", + "title": "consensus info" + }, + "proposer_address": { + "type": "string", + "format": "byte" + } + }, + "description": "Header defines the structure of a block header." 
+ }, + "commit": { + "type": "object", + "properties": { + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "signatures": { + "type": "array", + "items": { + "type": "object", + "properties": { + "block_id_flag": { + "type": "string", + "enum": [ + "BLOCK_ID_FLAG_UNKNOWN", + "BLOCK_ID_FLAG_ABSENT", + "BLOCK_ID_FLAG_COMMIT", + "BLOCK_ID_FLAG_NIL" + ], + "default": "BLOCK_ID_FLAG_UNKNOWN", + "title": "BlockIdFlag indicates which BlcokID the signature is for" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "CommitSig is a part of the Vote included in a Commit." + } + } + }, + "description": "Commit contains the evidence that a block was committed by a set of validators." 
+ } + } + }, + "validator_set": { + "type": "object", + "properties": { + "validators": { + "type": "array", + "items": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + } + }, + "proposer": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + }, + "total_voting_power": { + "type": "string", + "format": "int64" + } + } + } + } + }, + "common_height": { + "type": "string", + "format": "int64" + }, + "byzantine_validators": { + "type": "array", + "items": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + } + }, + "total_voting_power": { + "type": "string", + "format": "int64" + }, + "timestamp": { + "type": "string", + "format": "date-time" + } + }, + 
"description": "LightClientAttackEvidence contains evidence of a set of validators attempting to mislead a light client." + } + } + } + } + } + }, + "last_commit": { + "type": "object", + "properties": { + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "signatures": { + "type": "array", + "items": { + "type": "object", + "properties": { + "block_id_flag": { + "type": "string", + "enum": [ + "BLOCK_ID_FLAG_UNKNOWN", + "BLOCK_ID_FLAG_ABSENT", + "BLOCK_ID_FLAG_COMMIT", + "BLOCK_ID_FLAG_NIL" + ], + "default": "BLOCK_ID_FLAG_UNKNOWN", + "title": "BlockIdFlag indicates which BlcokID the signature is for" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "CommitSig is a part of the Vote included in a Commit." + } + } + }, + "description": "Commit contains the evidence that a block was committed by a set of validators." + } + } + }, + "sdk_block": { + "title": "Since: cosmos-sdk 0.47", + "type": "object", + "properties": { + "header": { + "type": "object", + "properties": { + "version": { + "title": "basic block info", + "type": "object", + "properties": { + "block": { + "type": "string", + "format": "uint64" + }, + "app": { + "type": "string", + "format": "uint64" + } + }, + "description": "Consensus captures the consensus rules for processing a block in the blockchain,\nincluding all blockchain data structures and the rules of the application's\nstate transition machine." 
+ }, + "chain_id": { + "type": "string" + }, + "height": { + "type": "string", + "format": "int64" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "last_block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "last_commit_hash": { + "type": "string", + "format": "byte", + "title": "hashes of block data" + }, + "data_hash": { + "type": "string", + "format": "byte" + }, + "validators_hash": { + "type": "string", + "format": "byte", + "title": "hashes from the app output from the prev block" + }, + "next_validators_hash": { + "type": "string", + "format": "byte" + }, + "consensus_hash": { + "type": "string", + "format": "byte" + }, + "app_hash": { + "type": "string", + "format": "byte" + }, + "last_results_hash": { + "type": "string", + "format": "byte" + }, + "evidence_hash": { + "type": "string", + "format": "byte", + "title": "consensus info" + }, + "proposer_address": { + "type": "string", + "description": "proposer_address is the original block proposer address, formatted as a Bech32 string.\nIn Tendermint, this type is `bytes`, but in the SDK, we convert it to a Bech32 string\nfor better UX." + } + }, + "description": "Header defines the structure of a Tendermint block header." + }, + "data": { + "type": "object", + "properties": { + "txs": { + "type": "array", + "items": { + "type": "string", + "format": "byte" + }, + "description": "Txs that will be applied by state @ block.Height+1.\nNOTE: not all txs here are valid. We're just agreeing on the order first.\nThis means that block.AppHash does not include these txs." 
+ } + }, + "title": "Data contains the set of transactions included in the block" + }, + "evidence": { + "type": "object", + "properties": { + "evidence": { + "type": "array", + "items": { + "type": "object", + "properties": { + "duplicate_vote_evidence": { + "type": "object", + "properties": { + "vote_a": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "SIGNED_MSG_TYPE_UNKNOWN", + "SIGNED_MSG_TYPE_PREVOTE", + "SIGNED_MSG_TYPE_PRECOMMIT", + "SIGNED_MSG_TYPE_PROPOSAL" + ], + "default": "SIGNED_MSG_TYPE_UNKNOWN", + "description": "SignedMsgType is a type of signed message in the consensus.\n\n - SIGNED_MSG_TYPE_PREVOTE: Votes\n - SIGNED_MSG_TYPE_PROPOSAL: Proposals" + }, + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "validator_index": { + "type": "integer", + "format": "int32" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "Vote represents a prevote, precommit, or commit vote from validators for\nconsensus." 
+ }, + "vote_b": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "SIGNED_MSG_TYPE_UNKNOWN", + "SIGNED_MSG_TYPE_PREVOTE", + "SIGNED_MSG_TYPE_PRECOMMIT", + "SIGNED_MSG_TYPE_PROPOSAL" + ], + "default": "SIGNED_MSG_TYPE_UNKNOWN", + "description": "SignedMsgType is a type of signed message in the consensus.\n\n - SIGNED_MSG_TYPE_PREVOTE: Votes\n - SIGNED_MSG_TYPE_PROPOSAL: Proposals" + }, + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "validator_index": { + "type": "integer", + "format": "int32" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "Vote represents a prevote, precommit, or commit vote from validators for\nconsensus." + }, + "total_voting_power": { + "type": "string", + "format": "int64" + }, + "validator_power": { + "type": "string", + "format": "int64" + }, + "timestamp": { + "type": "string", + "format": "date-time" + } + }, + "description": "DuplicateVoteEvidence contains evidence of a validator signed two conflicting votes." 
+ }, + "light_client_attack_evidence": { + "type": "object", + "properties": { + "conflicting_block": { + "type": "object", + "properties": { + "signed_header": { + "type": "object", + "properties": { + "header": { + "type": "object", + "properties": { + "version": { + "title": "basic block info", + "type": "object", + "properties": { + "block": { + "type": "string", + "format": "uint64" + }, + "app": { + "type": "string", + "format": "uint64" + } + }, + "description": "Consensus captures the consensus rules for processing a block in the blockchain,\nincluding all blockchain data structures and the rules of the application's\nstate transition machine." + }, + "chain_id": { + "type": "string" + }, + "height": { + "type": "string", + "format": "int64" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "last_block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "last_commit_hash": { + "type": "string", + "format": "byte", + "title": "hashes of block data" + }, + "data_hash": { + "type": "string", + "format": "byte" + }, + "validators_hash": { + "type": "string", + "format": "byte", + "title": "hashes from the app output from the prev block" + }, + "next_validators_hash": { + "type": "string", + "format": "byte" + }, + "consensus_hash": { + "type": "string", + "format": "byte" + }, + "app_hash": { + "type": "string", + "format": "byte" + }, + "last_results_hash": { + "type": "string", + "format": "byte" + }, + "evidence_hash": { + "type": "string", + "format": "byte", + "title": "consensus info" + }, + "proposer_address": { + "type": "string", + "format": "byte" + } + }, + "description": "Header defines the structure of a block header." 
+ }, + "commit": { + "type": "object", + "properties": { + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "signatures": { + "type": "array", + "items": { + "type": "object", + "properties": { + "block_id_flag": { + "type": "string", + "enum": [ + "BLOCK_ID_FLAG_UNKNOWN", + "BLOCK_ID_FLAG_ABSENT", + "BLOCK_ID_FLAG_COMMIT", + "BLOCK_ID_FLAG_NIL" + ], + "default": "BLOCK_ID_FLAG_UNKNOWN", + "title": "BlockIdFlag indicates which BlcokID the signature is for" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "CommitSig is a part of the Vote included in a Commit." + } + } + }, + "description": "Commit contains the evidence that a block was committed by a set of validators." 
+ } + } + }, + "validator_set": { + "type": "object", + "properties": { + "validators": { + "type": "array", + "items": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + } + }, + "proposer": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + }, + "total_voting_power": { + "type": "string", + "format": "int64" + } + } + } + } + }, + "common_height": { + "type": "string", + "format": "int64" + }, + "byzantine_validators": { + "type": "array", + "items": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + } + }, + "total_voting_power": { + "type": "string", + "format": "int64" + }, + "timestamp": { + "type": "string", + "format": "date-time" + } + }, + 
"description": "LightClientAttackEvidence contains evidence of a set of validators attempting to mislead a light client." + } + } + } + } + } + }, + "last_commit": { + "type": "object", + "properties": { + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "signatures": { + "type": "array", + "items": { + "type": "object", + "properties": { + "block_id_flag": { + "type": "string", + "enum": [ + "BLOCK_ID_FLAG_UNKNOWN", + "BLOCK_ID_FLAG_ABSENT", + "BLOCK_ID_FLAG_COMMIT", + "BLOCK_ID_FLAG_NIL" + ], + "default": "BLOCK_ID_FLAG_UNKNOWN", + "title": "BlockIdFlag indicates which BlcokID the signature is for" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "CommitSig is a part of the Vote included in a Commit." + } + } + }, + "description": "Commit contains the evidence that a block was committed by a set of validators." + } + }, + "description": "Block is tendermint type Block, with the Header proposer address\nfield converted to bech32 string." + } + }, + "description": "GetBlockByHeightResponse is the response type for the Query/GetBlockByHeight RPC method." 
+ } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "details": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." 
+ } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + } + } + } + } + } + }, + "parameters": [ + { + "name": "height", + "in": "path", + "required": true, + "type": "string", + "format": "int64" + } + ], + "tags": [ + "Service" + ] + } + }, + "/cosmos/base/tendermint/v1beta1/node_info": { + "get": { + "summary": "GetNodeInfo queries the current node info.", + "operationId": "GetNodeInfo", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "type": "object", + "properties": { + "default_node_info": { + "type": "object", + "properties": { + "protocol_version": { + "type": "object", + "properties": { + "p2p": { + "type": "string", + "format": "uint64" + }, + "block": { + "type": "string", + "format": "uint64" + }, + "app": { + "type": "string", + "format": "uint64" + } + } + }, + "default_node_id": { + "type": "string" + }, + "listen_addr": { + "type": "string" + }, + "network": { + "type": "string" + }, + "version": { + "type": "string" + }, + "channels": { + "type": "string", + "format": "byte" + }, + "moniker": { + "type": "string" + }, + "other": { + "type": "object", + "properties": { + "tx_index": { + "type": "string" + }, + "rpc_address": { + "type": "string" + } + } + } + } + }, + "application_version": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "app_name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "git_commit": { + "type": "string" + }, + "build_tags": { + "type": "string" + }, + "go_version": { + "type": "string" + }, + "build_deps": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string", + "title": "module path" + }, + "version": { + "type": "string", + "title": "module version" + }, + "sum": { + "type": "string", + "title": "checksum" + } + }, + "title": "Module is the type for VersionInfo" + } + }, + 
"cosmos_sdk_version": { + "type": "string", + "title": "Since: cosmos-sdk 0.43" + } + }, + "description": "VersionInfo is the type for the GetNodeInfoResponse message." + } + }, + "description": "GetNodeInfoResponse is the response type for the Query/GetNodeInfo RPC method." + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "details": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. 
(Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." + } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + } + } + } + } + } + }, + "tags": [ + "Service" + ] + } + }, + "/cosmos/base/tendermint/v1beta1/syncing": { + "get": { + "summary": "GetSyncing queries node syncing.", + "operationId": "GetSyncing", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "type": "object", + "properties": { + "syncing": { + "type": "boolean" + } + }, + "description": "GetSyncingResponse is the response type for the Query/GetSyncing RPC method." + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "details": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. 
However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." 
+ } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + } + } + } + } + } + }, + "tags": [ + "Service" + ] + } + }, + "/cosmos/base/tendermint/v1beta1/validatorsets/latest": { + "get": { + "summary": "GetLatestValidatorSet queries latest validator-set.", + "operationId": "GetLatestValidatorSet", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "type": "object", + "properties": { + "block_height": { + "type": "string", + "format": "int64" + }, + "validators": { + "type": "array", + "items": { + "type": "object", + "properties": { + "address": { + "type": "string" + }, + "pub_key": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. 
(Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." + } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + }, + "description": "Validator is the type for the validator-set." + } + }, + "pagination": { + "description": "pagination defines an pagination for the response.", + "type": "object", + "properties": { + "next_key": { + "type": "string", + "format": "byte", + "description": "next_key is the key to be passed to PageRequest.key to\nquery the next page most efficiently. It will be empty if\nthere are no more results." + }, + "total": { + "type": "string", + "format": "uint64", + "title": "total is total number of results available if PageRequest.count_total\nwas set, its value is undefined otherwise" + } + } + } + }, + "description": "GetLatestValidatorSetResponse is the response type for the Query/GetValidatorSetByHeight RPC method." + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "details": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. 
This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." 
+ } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + } + } + } + } + } + }, + "parameters": [ + { + "name": "pagination.key", + "description": "key is a value returned in PageResponse.next_key to begin\nquerying the next page most efficiently. Only one of offset or key\nshould be set.", + "in": "query", + "required": false, + "type": "string", + "format": "byte" + }, + { + "name": "pagination.offset", + "description": "offset is a numeric offset that can be used when key is unavailable.\nIt is less efficient than using key. Only one of offset or key should\nbe set.", + "in": "query", + "required": false, + "type": "string", + "format": "uint64" + }, + { + "name": "pagination.limit", + "description": "limit is the total number of results to be returned in the result page.\nIf left empty it will default to a value to be set by each app.", + "in": "query", + "required": false, + "type": "string", + "format": "uint64" + }, + { + "name": "pagination.count_total", + "description": "count_total is set to true to indicate that the result set should include\na count of the total number of items available for pagination in UIs.\ncount_total is only respected when offset is used. 
It is ignored when key\nis set.", + "in": "query", + "required": false, + "type": "boolean" + }, + { + "name": "pagination.reverse", + "description": "reverse is set to true if results are to be returned in the descending order.\n\nSince: cosmos-sdk 0.43", + "in": "query", + "required": false, + "type": "boolean" + } + ], + "tags": [ + "Service" + ] + } + }, + "/cosmos/base/tendermint/v1beta1/validatorsets/{height}": { + "get": { + "summary": "GetValidatorSetByHeight queries validator-set at a given height.", + "operationId": "GetValidatorSetByHeight", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "type": "object", + "properties": { + "block_height": { + "type": "string", + "format": "int64" + }, + "validators": { + "type": "array", + "items": { + "type": "object", + "properties": { + "address": { + "type": "string" + }, + "pub_key": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. 
Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." + } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which 
contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + }, + "description": "Validator is the type for the validator-set." + } + }, + "pagination": { + "description": "pagination defines an pagination for the response.", + "type": "object", + "properties": { + "next_key": { + "type": "string", + "format": "byte", + "description": "next_key is the key to be passed to PageRequest.key to\nquery the next page most efficiently. It will be empty if\nthere are no more results." + }, + "total": { + "type": "string", + "format": "uint64", + "title": "total is total number of results available if PageRequest.count_total\nwas set, its value is undefined otherwise" + } + } + } + }, + "description": "GetValidatorSetByHeightResponse is the response type for the Query/GetValidatorSetByHeight RPC method." + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "details": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. 
This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." 
+ } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + } + } + } + } + } + }, + "parameters": [ + { + "name": "height", + "in": "path", + "required": true, + "type": "string", + "format": "int64" + }, + { + "name": "pagination.key", + "description": "key is a value returned in PageResponse.next_key to begin\nquerying the next page most efficiently. Only one of offset or key\nshould be set.", + "in": "query", + "required": false, + "type": "string", + "format": "byte" + }, + { + "name": "pagination.offset", + "description": "offset is a numeric offset that can be used when key is unavailable.\nIt is less efficient than using key. Only one of offset or key should\nbe set.", + "in": "query", + "required": false, + "type": "string", + "format": "uint64" + }, + { + "name": "pagination.limit", + "description": "limit is the total number of results to be returned in the result page.\nIf left empty it will default to a value to be set by each app.", + "in": "query", + "required": false, + "type": "string", + "format": "uint64" + }, + { + "name": "pagination.count_total", + "description": "count_total is set to true to indicate that the result set should include\na count of the total number of items available for pagination in UIs.\ncount_total is only respected when offset is used. 
It is ignored when key\nis set.", + "in": "query", + "required": false, + "type": "boolean" + }, + { + "name": "pagination.reverse", + "description": "reverse is set to true if results are to be returned in the descending order.\n\nSince: cosmos-sdk 0.43", + "in": "query", + "required": false, + "type": "boolean" + } + ], + "tags": [ "Service" ] } + } + }, + "definitions": { + "cosmos.base.query.v1beta1.PageRequest": { + "type": "object", + "properties": { + "key": { + "type": "string", + "format": "byte", + "description": "key is a value returned in PageResponse.next_key to begin\nquerying the next page most efficiently. Only one of offset or key\nshould be set." + }, + "offset": { + "type": "string", + "format": "uint64", + "description": "offset is a numeric offset that can be used when key is unavailable.\nIt is less efficient than using key. Only one of offset or key should\nbe set." + }, + "limit": { + "type": "string", + "format": "uint64", + "description": "limit is the total number of results to be returned in the result page.\nIf left empty it will default to a value to be set by each app." + }, + "count_total": { + "type": "boolean", + "description": "count_total is set to true to indicate that the result set should include\na count of the total number of items available for pagination in UIs.\ncount_total is only respected when offset is used. It is ignored when key\nis set." + }, + "reverse": { + "type": "boolean", + "description": "reverse is set to true if results are to be returned in the descending order.\n\nSince: cosmos-sdk 0.43" + } + }, + "description": "message SomeRequest {\n Foo some_parameter = 1;\n PageRequest pagination = 2;\n }", + "title": "PageRequest is to be embedded in gRPC request messages for efficient\npagination. 
Ex:" + }, + "cosmos.base.query.v1beta1.PageResponse": { + "type": "object", + "properties": { + "next_key": { + "type": "string", + "format": "byte", + "description": "next_key is the key to be passed to PageRequest.key to\nquery the next page most efficiently. It will be empty if\nthere are no more results." + }, + "total": { + "type": "string", + "format": "uint64", + "title": "total is total number of results available if PageRequest.count_total\nwas set, its value is undefined otherwise" + } + }, + "description": "PageResponse is to be embedded in gRPC response messages where the\ncorresponding request message has used PageRequest.\n\n message SomeResponse {\n repeated Bar results = 1;\n PageResponse page = 2;\n }" + }, + "cosmos.base.tendermint.v1beta1.ABCIQueryResponse": { + "type": "object", + "properties": { + "code": { + "type": "integer", + "format": "int64" + }, + "log": { + "type": "string" + }, + "info": { + "type": "string" + }, + "index": { + "type": "string", + "format": "int64" + }, + "key": { + "type": "string", + "format": "byte" + }, + "value": { + "type": "string", + "format": "byte" + }, + "proof_ops": { + "type": "object", + "properties": { + "ops": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "key": { + "type": "string", + "format": "byte" + }, + "data": { + "type": "string", + "format": "byte" + } + }, + "description": "ProofOp defines an operation used for calculating Merkle root. The data could\nbe arbitrary format, providing necessary data for example neighbouring node\nhash.\n\nNote: This type is a duplicate of the ProofOp proto type defined in Tendermint." + } + } + }, + "description": "ProofOps is Merkle proof defined by the list of ProofOps.\n\nNote: This type is a duplicate of the ProofOps proto type defined in Tendermint." 
+ }, + "height": { + "type": "string", + "format": "int64" + }, + "codespace": { + "type": "string" + } + }, + "description": "ABCIQueryResponse defines the response structure for the ABCIQuery gRPC query.\n\nNote: This type is a duplicate of the ResponseQuery proto type defined in\nTendermint." + }, + "cosmos.base.tendermint.v1beta1.Block": { + "type": "object", + "properties": { + "header": { + "type": "object", + "properties": { + "version": { + "title": "basic block info", + "type": "object", + "properties": { + "block": { + "type": "string", + "format": "uint64" + }, + "app": { + "type": "string", + "format": "uint64" + } + }, + "description": "Consensus captures the consensus rules for processing a block in the blockchain,\nincluding all blockchain data structures and the rules of the application's\nstate transition machine." + }, + "chain_id": { + "type": "string" + }, + "height": { + "type": "string", + "format": "int64" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "last_block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "last_commit_hash": { + "type": "string", + "format": "byte", + "title": "hashes of block data" + }, + "data_hash": { + "type": "string", + "format": "byte" + }, + "validators_hash": { + "type": "string", + "format": "byte", + "title": "hashes from the app output from the prev block" + }, + "next_validators_hash": { + "type": "string", + "format": "byte" + }, + "consensus_hash": { + "type": "string", + "format": "byte" + }, + "app_hash": { + "type": "string", + "format": "byte" + }, + "last_results_hash": { + "type": "string", + "format": "byte" + }, + "evidence_hash": { + "type": "string", + "format": "byte", + "title": 
"consensus info" + }, + "proposer_address": { + "type": "string", + "description": "proposer_address is the original block proposer address, formatted as a Bech32 string.\nIn Tendermint, this type is `bytes`, but in the SDK, we convert it to a Bech32 string\nfor better UX." + } + }, + "description": "Header defines the structure of a Tendermint block header." + }, + "data": { + "type": "object", + "properties": { + "txs": { + "type": "array", + "items": { + "type": "string", + "format": "byte" + }, + "description": "Txs that will be applied by state @ block.Height+1.\nNOTE: not all txs here are valid. We're just agreeing on the order first.\nThis means that block.AppHash does not include these txs." + } + }, + "title": "Data contains the set of transactions included in the block" + }, + "evidence": { + "type": "object", + "properties": { + "evidence": { + "type": "array", + "items": { + "type": "object", + "properties": { + "duplicate_vote_evidence": { + "type": "object", + "properties": { + "vote_a": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "SIGNED_MSG_TYPE_UNKNOWN", + "SIGNED_MSG_TYPE_PREVOTE", + "SIGNED_MSG_TYPE_PRECOMMIT", + "SIGNED_MSG_TYPE_PROPOSAL" + ], + "default": "SIGNED_MSG_TYPE_UNKNOWN", + "description": "SignedMsgType is a type of signed message in the consensus.\n\n - SIGNED_MSG_TYPE_PREVOTE: Votes\n - SIGNED_MSG_TYPE_PROPOSAL: Proposals" + }, + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "validator_address": { + "type": 
"string", + "format": "byte" + }, + "validator_index": { + "type": "integer", + "format": "int32" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "Vote represents a prevote, precommit, or commit vote from validators for\nconsensus." + }, + "vote_b": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "SIGNED_MSG_TYPE_UNKNOWN", + "SIGNED_MSG_TYPE_PREVOTE", + "SIGNED_MSG_TYPE_PRECOMMIT", + "SIGNED_MSG_TYPE_PROPOSAL" + ], + "default": "SIGNED_MSG_TYPE_UNKNOWN", + "description": "SignedMsgType is a type of signed message in the consensus.\n\n - SIGNED_MSG_TYPE_PREVOTE: Votes\n - SIGNED_MSG_TYPE_PROPOSAL: Proposals" + }, + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "validator_index": { + "type": "integer", + "format": "int32" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "Vote represents a prevote, precommit, or commit vote from validators for\nconsensus." + }, + "total_voting_power": { + "type": "string", + "format": "int64" + }, + "validator_power": { + "type": "string", + "format": "int64" + }, + "timestamp": { + "type": "string", + "format": "date-time" + } + }, + "description": "DuplicateVoteEvidence contains evidence of a validator signed two conflicting votes." 
+ }, + "light_client_attack_evidence": { + "type": "object", + "properties": { + "conflicting_block": { + "type": "object", + "properties": { + "signed_header": { + "type": "object", + "properties": { + "header": { + "type": "object", + "properties": { + "version": { + "title": "basic block info", + "type": "object", + "properties": { + "block": { + "type": "string", + "format": "uint64" + }, + "app": { + "type": "string", + "format": "uint64" + } + }, + "description": "Consensus captures the consensus rules for processing a block in the blockchain,\nincluding all blockchain data structures and the rules of the application's\nstate transition machine." + }, + "chain_id": { + "type": "string" + }, + "height": { + "type": "string", + "format": "int64" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "last_block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "last_commit_hash": { + "type": "string", + "format": "byte", + "title": "hashes of block data" + }, + "data_hash": { + "type": "string", + "format": "byte" + }, + "validators_hash": { + "type": "string", + "format": "byte", + "title": "hashes from the app output from the prev block" + }, + "next_validators_hash": { + "type": "string", + "format": "byte" + }, + "consensus_hash": { + "type": "string", + "format": "byte" + }, + "app_hash": { + "type": "string", + "format": "byte" + }, + "last_results_hash": { + "type": "string", + "format": "byte" + }, + "evidence_hash": { + "type": "string", + "format": "byte", + "title": "consensus info" + }, + "proposer_address": { + "type": "string", + "format": "byte" + } + }, + "description": "Header defines the structure of a block header." 
+ }, + "commit": { + "type": "object", + "properties": { + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "signatures": { + "type": "array", + "items": { + "type": "object", + "properties": { + "block_id_flag": { + "type": "string", + "enum": [ + "BLOCK_ID_FLAG_UNKNOWN", + "BLOCK_ID_FLAG_ABSENT", + "BLOCK_ID_FLAG_COMMIT", + "BLOCK_ID_FLAG_NIL" + ], + "default": "BLOCK_ID_FLAG_UNKNOWN", + "title": "BlockIdFlag indicates which BlcokID the signature is for" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "CommitSig is a part of the Vote included in a Commit." + } + } + }, + "description": "Commit contains the evidence that a block was committed by a set of validators." 
+ } + } + }, + "validator_set": { + "type": "object", + "properties": { + "validators": { + "type": "array", + "items": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + } + }, + "proposer": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + }, + "total_voting_power": { + "type": "string", + "format": "int64" + } + } + } + } + }, + "common_height": { + "type": "string", + "format": "int64" + }, + "byzantine_validators": { + "type": "array", + "items": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + } + }, + "total_voting_power": { + "type": "string", + "format": "int64" + }, + "timestamp": { + "type": "string", + "format": "date-time" + } + }, + 
"description": "LightClientAttackEvidence contains evidence of a set of validators attempting to mislead a light client." + } + } + } + } + } + }, + "last_commit": { + "type": "object", + "properties": { + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "signatures": { + "type": "array", + "items": { + "type": "object", + "properties": { + "block_id_flag": { + "type": "string", + "enum": [ + "BLOCK_ID_FLAG_UNKNOWN", + "BLOCK_ID_FLAG_ABSENT", + "BLOCK_ID_FLAG_COMMIT", + "BLOCK_ID_FLAG_NIL" + ], + "default": "BLOCK_ID_FLAG_UNKNOWN", + "title": "BlockIdFlag indicates which BlcokID the signature is for" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "CommitSig is a part of the Vote included in a Commit." + } + } + }, + "description": "Commit contains the evidence that a block was committed by a set of validators." + } + }, + "description": "Block is tendermint type Block, with the Header proposer address\nfield converted to bech32 string." 
}, - "/cosmos/base/tendermint/v1beta1/validatorsets/latest": { - "get": { - "summary": "GetLatestValidatorSet queries latest validator-set.", - "operationId": "GetLatestValidatorSet", - "responses": { - "200": { - "description": "A successful response.", - "schema": { + "cosmos.base.tendermint.v1beta1.GetBlockByHeightResponse": { + "type": "object", + "properties": { + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "block": { + "title": "Deprecated: please use `sdk_block` instead", + "type": "object", + "properties": { + "header": { + "type": "object", + "properties": { + "version": { + "title": "basic block info", + "type": "object", + "properties": { + "block": { + "type": "string", + "format": "uint64" + }, + "app": { + "type": "string", + "format": "uint64" + } + }, + "description": "Consensus captures the consensus rules for processing a block in the blockchain,\nincluding all blockchain data structures and the rules of the application's\nstate transition machine." 
+ }, + "chain_id": { + "type": "string" + }, + "height": { + "type": "string", + "format": "int64" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "last_block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "last_commit_hash": { + "type": "string", + "format": "byte", + "title": "hashes of block data" + }, + "data_hash": { + "type": "string", + "format": "byte" + }, + "validators_hash": { + "type": "string", + "format": "byte", + "title": "hashes from the app output from the prev block" + }, + "next_validators_hash": { + "type": "string", + "format": "byte" + }, + "consensus_hash": { + "type": "string", + "format": "byte" + }, + "app_hash": { + "type": "string", + "format": "byte" + }, + "last_results_hash": { + "type": "string", + "format": "byte" + }, + "evidence_hash": { + "type": "string", + "format": "byte", + "title": "consensus info" + }, + "proposer_address": { + "type": "string", + "format": "byte" + } + }, + "description": "Header defines the structure of a block header." + }, + "data": { + "type": "object", + "properties": { + "txs": { + "type": "array", + "items": { + "type": "string", + "format": "byte" + }, + "description": "Txs that will be applied by state @ block.Height+1.\nNOTE: not all txs here are valid. We're just agreeing on the order first.\nThis means that block.AppHash does not include these txs." 
+ } + }, + "title": "Data contains the set of transactions included in the block" + }, + "evidence": { + "type": "object", + "properties": { + "evidence": { + "type": "array", + "items": { + "type": "object", + "properties": { + "duplicate_vote_evidence": { + "type": "object", + "properties": { + "vote_a": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "SIGNED_MSG_TYPE_UNKNOWN", + "SIGNED_MSG_TYPE_PREVOTE", + "SIGNED_MSG_TYPE_PRECOMMIT", + "SIGNED_MSG_TYPE_PROPOSAL" + ], + "default": "SIGNED_MSG_TYPE_UNKNOWN", + "description": "SignedMsgType is a type of signed message in the consensus.\n\n - SIGNED_MSG_TYPE_PREVOTE: Votes\n - SIGNED_MSG_TYPE_PROPOSAL: Proposals" + }, + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "validator_index": { + "type": "integer", + "format": "int32" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "Vote represents a prevote, precommit, or commit vote from validators for\nconsensus." 
+ }, + "vote_b": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "SIGNED_MSG_TYPE_UNKNOWN", + "SIGNED_MSG_TYPE_PREVOTE", + "SIGNED_MSG_TYPE_PRECOMMIT", + "SIGNED_MSG_TYPE_PROPOSAL" + ], + "default": "SIGNED_MSG_TYPE_UNKNOWN", + "description": "SignedMsgType is a type of signed message in the consensus.\n\n - SIGNED_MSG_TYPE_PREVOTE: Votes\n - SIGNED_MSG_TYPE_PROPOSAL: Proposals" + }, + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "validator_index": { + "type": "integer", + "format": "int32" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "Vote represents a prevote, precommit, or commit vote from validators for\nconsensus." + }, + "total_voting_power": { + "type": "string", + "format": "int64" + }, + "validator_power": { + "type": "string", + "format": "int64" + }, + "timestamp": { + "type": "string", + "format": "date-time" + } + }, + "description": "DuplicateVoteEvidence contains evidence of a validator signed two conflicting votes." 
+ }, + "light_client_attack_evidence": { + "type": "object", + "properties": { + "conflicting_block": { + "type": "object", + "properties": { + "signed_header": { + "type": "object", + "properties": { + "header": { + "type": "object", + "properties": { + "version": { + "title": "basic block info", + "type": "object", + "properties": { + "block": { + "type": "string", + "format": "uint64" + }, + "app": { + "type": "string", + "format": "uint64" + } + }, + "description": "Consensus captures the consensus rules for processing a block in the blockchain,\nincluding all blockchain data structures and the rules of the application's\nstate transition machine." + }, + "chain_id": { + "type": "string" + }, + "height": { + "type": "string", + "format": "int64" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "last_block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "last_commit_hash": { + "type": "string", + "format": "byte", + "title": "hashes of block data" + }, + "data_hash": { + "type": "string", + "format": "byte" + }, + "validators_hash": { + "type": "string", + "format": "byte", + "title": "hashes from the app output from the prev block" + }, + "next_validators_hash": { + "type": "string", + "format": "byte" + }, + "consensus_hash": { + "type": "string", + "format": "byte" + }, + "app_hash": { + "type": "string", + "format": "byte" + }, + "last_results_hash": { + "type": "string", + "format": "byte" + }, + "evidence_hash": { + "type": "string", + "format": "byte", + "title": "consensus info" + }, + "proposer_address": { + "type": "string", + "format": "byte" + } + }, + "description": "Header defines the structure of a block header." 
+ }, + "commit": { + "type": "object", + "properties": { + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "signatures": { + "type": "array", + "items": { + "type": "object", + "properties": { + "block_id_flag": { + "type": "string", + "enum": [ + "BLOCK_ID_FLAG_UNKNOWN", + "BLOCK_ID_FLAG_ABSENT", + "BLOCK_ID_FLAG_COMMIT", + "BLOCK_ID_FLAG_NIL" + ], + "default": "BLOCK_ID_FLAG_UNKNOWN", + "title": "BlockIdFlag indicates which BlcokID the signature is for" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "CommitSig is a part of the Vote included in a Commit." + } + } + }, + "description": "Commit contains the evidence that a block was committed by a set of validators." 
+ } + } + }, + "validator_set": { + "type": "object", + "properties": { + "validators": { + "type": "array", + "items": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + } + }, + "proposer": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + }, + "total_voting_power": { + "type": "string", + "format": "int64" + } + } + } + } + }, + "common_height": { + "type": "string", + "format": "int64" + }, + "byzantine_validators": { + "type": "array", + "items": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + } + }, + "total_voting_power": { + "type": "string", + "format": "int64" + }, + "timestamp": { + "type": "string", + "format": "date-time" + } + }, + 
"description": "LightClientAttackEvidence contains evidence of a set of validators attempting to mislead a light client." + } + } + } + } + } + }, + "last_commit": { "type": "object", "properties": { - "block_height": { + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "signatures": { + "type": "array", + "items": { + "type": "object", + "properties": { + "block_id_flag": { + "type": "string", + "enum": [ + "BLOCK_ID_FLAG_UNKNOWN", + "BLOCK_ID_FLAG_ABSENT", + "BLOCK_ID_FLAG_COMMIT", + "BLOCK_ID_FLAG_NIL" + ], + "default": "BLOCK_ID_FLAG_UNKNOWN", + "title": "BlockIdFlag indicates which BlcokID the signature is for" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "CommitSig is a part of the Vote included in a Commit." + } + } + }, + "description": "Commit contains the evidence that a block was committed by a set of validators." + } + } + }, + "sdk_block": { + "title": "Since: cosmos-sdk 0.47", + "type": "object", + "properties": { + "header": { + "type": "object", + "properties": { + "version": { + "title": "basic block info", + "type": "object", + "properties": { + "block": { + "type": "string", + "format": "uint64" + }, + "app": { + "type": "string", + "format": "uint64" + } + }, + "description": "Consensus captures the consensus rules for processing a block in the blockchain,\nincluding all blockchain data structures and the rules of the application's\nstate transition machine." 
+ }, + "chain_id": { + "type": "string" + }, + "height": { + "type": "string", + "format": "int64" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "last_block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "last_commit_hash": { "type": "string", - "format": "int64" + "format": "byte", + "title": "hashes of block data" }, - "validators": { + "data_hash": { + "type": "string", + "format": "byte" + }, + "validators_hash": { + "type": "string", + "format": "byte", + "title": "hashes from the app output from the prev block" + }, + "next_validators_hash": { + "type": "string", + "format": "byte" + }, + "consensus_hash": { + "type": "string", + "format": "byte" + }, + "app_hash": { + "type": "string", + "format": "byte" + }, + "last_results_hash": { + "type": "string", + "format": "byte" + }, + "evidence_hash": { + "type": "string", + "format": "byte", + "title": "consensus info" + }, + "proposer_address": { + "type": "string", + "description": "proposer_address is the original block proposer address, formatted as a Bech32 string.\nIn Tendermint, this type is `bytes`, but in the SDK, we convert it to a Bech32 string\nfor better UX." + } + }, + "description": "Header defines the structure of a Tendermint block header." + }, + "data": { + "type": "object", + "properties": { + "txs": { + "type": "array", + "items": { + "type": "string", + "format": "byte" + }, + "description": "Txs that will be applied by state @ block.Height+1.\nNOTE: not all txs here are valid. We're just agreeing on the order first.\nThis means that block.AppHash does not include these txs." 
+ } + }, + "title": "Data contains the set of transactions included in the block" + }, + "evidence": { + "type": "object", + "properties": { + "evidence": { "type": "array", "items": { "type": "object", "properties": { - "address": { - "type": "string" + "duplicate_vote_evidence": { + "type": "object", + "properties": { + "vote_a": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "SIGNED_MSG_TYPE_UNKNOWN", + "SIGNED_MSG_TYPE_PREVOTE", + "SIGNED_MSG_TYPE_PRECOMMIT", + "SIGNED_MSG_TYPE_PROPOSAL" + ], + "default": "SIGNED_MSG_TYPE_UNKNOWN", + "description": "SignedMsgType is a type of signed message in the consensus.\n\n - SIGNED_MSG_TYPE_PREVOTE: Votes\n - SIGNED_MSG_TYPE_PROPOSAL: Proposals" + }, + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "validator_index": { + "type": "integer", + "format": "int32" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "Vote represents a prevote, precommit, or commit vote from validators for\nconsensus." 
+ }, + "vote_b": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "SIGNED_MSG_TYPE_UNKNOWN", + "SIGNED_MSG_TYPE_PREVOTE", + "SIGNED_MSG_TYPE_PRECOMMIT", + "SIGNED_MSG_TYPE_PROPOSAL" + ], + "default": "SIGNED_MSG_TYPE_UNKNOWN", + "description": "SignedMsgType is a type of signed message in the consensus.\n\n - SIGNED_MSG_TYPE_PREVOTE: Votes\n - SIGNED_MSG_TYPE_PROPOSAL: Proposals" + }, + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "validator_index": { + "type": "integer", + "format": "int32" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "Vote represents a prevote, precommit, or commit vote from validators for\nconsensus." + }, + "total_voting_power": { + "type": "string", + "format": "int64" + }, + "validator_power": { + "type": "string", + "format": "int64" + }, + "timestamp": { + "type": "string", + "format": "date-time" + } + }, + "description": "DuplicateVoteEvidence contains evidence of a validator signed two conflicting votes." 
}, - "pub_key": { + "light_client_attack_evidence": { "type": "object", "properties": { - "type_url": { + "conflicting_block": { + "type": "object", + "properties": { + "signed_header": { + "type": "object", + "properties": { + "header": { + "type": "object", + "properties": { + "version": { + "title": "basic block info", + "type": "object", + "properties": { + "block": { + "type": "string", + "format": "uint64" + }, + "app": { + "type": "string", + "format": "uint64" + } + }, + "description": "Consensus captures the consensus rules for processing a block in the blockchain,\nincluding all blockchain data structures and the rules of the application's\nstate transition machine." + }, + "chain_id": { + "type": "string" + }, + "height": { + "type": "string", + "format": "int64" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "last_block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "last_commit_hash": { + "type": "string", + "format": "byte", + "title": "hashes of block data" + }, + "data_hash": { + "type": "string", + "format": "byte" + }, + "validators_hash": { + "type": "string", + "format": "byte", + "title": "hashes from the app output from the prev block" + }, + "next_validators_hash": { + "type": "string", + "format": "byte" + }, + "consensus_hash": { + "type": "string", + "format": "byte" + }, + "app_hash": { + "type": "string", + "format": "byte" + }, + "last_results_hash": { + "type": "string", + "format": "byte" + }, + "evidence_hash": { + "type": "string", + "format": "byte", + "title": "consensus info" + }, + "proposer_address": { + "type": "string", + "format": "byte" + } + }, + "description": "Header defines the structure of a block header." 
+ }, + "commit": { + "type": "object", + "properties": { + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "signatures": { + "type": "array", + "items": { + "type": "object", + "properties": { + "block_id_flag": { + "type": "string", + "enum": [ + "BLOCK_ID_FLAG_UNKNOWN", + "BLOCK_ID_FLAG_ABSENT", + "BLOCK_ID_FLAG_COMMIT", + "BLOCK_ID_FLAG_NIL" + ], + "default": "BLOCK_ID_FLAG_UNKNOWN", + "title": "BlockIdFlag indicates which BlcokID the signature is for" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "CommitSig is a part of the Vote included in a Commit." + } + } + }, + "description": "Commit contains the evidence that a block was committed by a set of validators." 
+ } + } + }, + "validator_set": { + "type": "object", + "properties": { + "validators": { + "type": "array", + "items": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + } + }, + "proposer": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + }, + "total_voting_power": { + "type": "string", + "format": "int64" + } + } + } + } + }, + "common_height": { "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. 
However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + "format": "int64" }, - "value": { + "byzantine_validators": { + "type": "array", + "items": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + } + }, + "total_voting_power": { "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." 
- } - }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - }, - "voting_power": { - "type": "string", - "format": "int64" - }, - "proposer_priority": { - "type": "string", - "format": "int64" + "format": "int64" + }, + "timestamp": { + "type": "string", + "format": "date-time" + } + }, + "description": "LightClientAttackEvidence contains evidence of a set of validators attempting to mislead a light client." } - }, - "description": "Validator is the type for the validator-set." - } - }, - "pagination": { - "description": "pagination defines an pagination for the response.", - "type": "object", - "properties": { - "next_key": { - "type": "string", - "format": "byte", - "title": "next_key is the key to be passed to PageRequest.key to\nquery the next page most efficiently" - }, - "total": { - "type": "string", - "format": "uint64", - "title": "total is total number of results available if PageRequest.count_total\nwas set, its value is undefined otherwise" } } } - }, - "description": "GetLatestValidatorSetResponse is the response type for the Query/GetValidatorSetByHeight RPC method." - } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "type": "object", - "properties": { - "error": { - "type": "string" - }, - "code": { - "type": "integer", - "format": "int32" - }, - "message": { - "type": "string" - }, - "details": { - "type": "array", - "items": { - "type": "object", - "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). 
The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." 
- } - }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - } - } } - } - } - }, - "parameters": [ - { - "name": "pagination.key", - "description": "key is a value returned in PageResponse.next_key to begin\nquerying the next page most efficiently. Only one of offset or key\nshould be set.", - "in": "query", - "required": false, - "type": "string", - "format": "byte" - }, - { - "name": "pagination.offset", - "description": "offset is a numeric offset that can be used when key is unavailable.\nIt is less efficient than using key. Only one of offset or key should\nbe set.", - "in": "query", - "required": false, - "type": "string", - "format": "uint64" - }, - { - "name": "pagination.limit", - "description": "limit is the total number of results to be returned in the result page.\nIf left empty it will default to a value to be set by each app.", - "in": "query", - "required": false, - "type": "string", - "format": "uint64" - }, - { - "name": "pagination.count_total", - "description": "count_total is set to true to indicate that the result set should include\na count of the total number of items available for pagination in UIs.\ncount_total is only respected when offset is used. 
It is ignored when key\nis set.", - "in": "query", - "required": false, - "type": "boolean" - }, - { - "name": "pagination.reverse", - "description": "reverse is set to true if results are to be returned in the descending order.\n\nSince: cosmos-sdk 0.43", - "in": "query", - "required": false, - "type": "boolean" - } - ], - "tags": [ - "Service" - ] - } - }, - "/cosmos/base/tendermint/v1beta1/validatorsets/{height}": { - "get": { - "summary": "GetValidatorSetByHeight queries validator-set at a given height.", - "operationId": "GetValidatorSetByHeight", - "responses": { - "200": { - "description": "A successful response.", - "schema": { + }, + "last_commit": { "type": "object", "properties": { - "block_height": { + "height": { "type": "string", "format": "int64" }, - "validators": { - "type": "array", - "items": { - "type": "object", - "properties": { - "address": { - "type": "string" - }, - "pub_key": { - "type": "object", - "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. 
Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." - } - }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains 
the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - }, - "voting_power": { - "type": "string", - "format": "int64" - }, - "proposer_priority": { - "type": "string", - "format": "int64" - } - }, - "description": "Validator is the type for the validator-set." - } + "round": { + "type": "integer", + "format": "int32" }, - "pagination": { - "description": "pagination defines an pagination for the response.", + "block_id": { "type": "object", "properties": { - "next_key": { + "hash": { "type": "string", - "format": "byte", - "title": "next_key is the key to be passed to PageRequest.key to\nquery the next page most efficiently" + "format": "byte" }, - "total": { - "type": "string", - "format": "uint64", - "title": "total is total number of results available if PageRequest.count_total\nwas set, its value is undefined otherwise" + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" } - } - } - }, - "description": "GetValidatorSetByHeightResponse is the response type for the Query/GetValidatorSetByHeight RPC method." 
- } - }, - "default": { - "description": "An unexpected error response.", - "schema": { - "type": "object", - "properties": { - "error": { - "type": "string" - }, - "code": { - "type": "integer", - "format": "int32" - }, - "message": { - "type": "string" + }, + "title": "BlockID" }, - "details": { + "signatures": { "type": "array", "items": { "type": "object", "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." 
- } - }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - } - } - } - } - } - }, - "parameters": [ - { - "name": "height", - "in": "path", - "required": true, - "type": "string", - "format": "int64" - }, - { - "name": "pagination.key", - "description": "key is a value returned in PageResponse.next_key to begin\nquerying the next page most efficiently. Only one of offset or key\nshould be set.", - "in": "query", - "required": false, - "type": "string", - "format": "byte" - }, - { - "name": "pagination.offset", - "description": "offset is a numeric offset that can be used when key is unavailable.\nIt is less efficient than using key. Only one of offset or key should\nbe set.", - "in": "query", - "required": false, - "type": "string", - "format": "uint64" - }, - { - "name": "pagination.limit", - "description": "limit is the total number of results to be returned in the result page.\nIf left empty it will default to a value to be set by each app.", - "in": "query", - "required": false, - "type": "string", - "format": "uint64" - }, - { - "name": "pagination.count_total", - "description": "count_total is set to true to indicate that the result set should include\na count of the total number of items available for pagination in UIs.\ncount_total is only respected when offset is used. 
It is ignored when key\nis set.", - "in": "query", - "required": false, - "type": "boolean" + "block_id_flag": { + "type": "string", + "enum": [ + "BLOCK_ID_FLAG_UNKNOWN", + "BLOCK_ID_FLAG_ABSENT", + "BLOCK_ID_FLAG_COMMIT", + "BLOCK_ID_FLAG_NIL" + ], + "default": "BLOCK_ID_FLAG_UNKNOWN", + "title": "BlockIdFlag indicates which BlockID the signature is for" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "CommitSig is a part of the Vote included in a Commit." + } + } + }, + "description": "Commit contains the evidence that a block was committed by a set of validators." + } }, - { - "name": "pagination.reverse", - "description": "reverse is set to true if results are to be returned in the descending order.\n\nSince: cosmos-sdk 0.43", - "in": "query", - "required": false, - "type": "boolean" - } - ], - "tags": [ - "Service" - ] - } - } - }, - "definitions": { - "cosmos.base.query.v1beta1.PageRequest": { - "type": "object", - "properties": { - "key": { - "type": "string", - "format": "byte", - "description": "key is a value returned in PageResponse.next_key to begin\nquerying the next page most efficiently. Only one of offset or key\nshould be set." - }, - "offset": { - "type": "string", - "format": "uint64", - "description": "offset is a numeric offset that can be used when key is unavailable.\nIt is less efficient than using key. Only one of offset or key should\nbe set." - }, - "limit": { - "type": "string", - "format": "uint64", - "description": "limit is the total number of results to be returned in the result page.\nIf left empty it will default to a value to be set by each app."
- }, - "count_total": { - "type": "boolean", - "description": "count_total is set to true to indicate that the result set should include\na count of the total number of items available for pagination in UIs.\ncount_total is only respected when offset is used. It is ignored when key\nis set." - }, - "reverse": { - "type": "boolean", - "description": "reverse is set to true if results are to be returned in the descending order.\n\nSince: cosmos-sdk 0.43" - } - }, - "description": "message SomeRequest {\n Foo some_parameter = 1;\n PageRequest pagination = 2;\n }", - "title": "PageRequest is to be embedded in gRPC request messages for efficient\npagination. Ex:" - }, - "cosmos.base.query.v1beta1.PageResponse": { - "type": "object", - "properties": { - "next_key": { - "type": "string", - "format": "byte", - "title": "next_key is the key to be passed to PageRequest.key to\nquery the next page most efficiently" - }, - "total": { - "type": "string", - "format": "uint64", - "title": "total is total number of results available if PageRequest.count_total\nwas set, its value is undefined otherwise" + "description": "Block is tendermint type Block, with the Header proposer address\nfield converted to bech32 string." } }, - "description": "PageResponse is to be embedded in gRPC response messages where the\ncorresponding request message has used PageRequest.\n\n message SomeResponse {\n repeated Bar results = 1;\n PageResponse page = 2;\n }" + "description": "GetBlockByHeightResponse is the response type for the Query/GetBlockByHeight RPC method." 
}, - "cosmos.base.tendermint.v1beta1.GetBlockByHeightResponse": { + "cosmos.base.tendermint.v1beta1.GetLatestBlockResponse": { "type": "object", "properties": { "block_id": { @@ -2068,6 +5539,7 @@ "title": "BlockID" }, "block": { + "title": "Deprecated: please use `sdk_block` instead", "type": "object", "properties": { "header": { @@ -2163,7 +5635,7 @@ "format": "byte" } }, - "description": "Header defines the structure of a Tendermint block header." + "description": "Header defines the structure of a block header." }, "data": { "type": "object", @@ -2437,7 +5909,7 @@ "format": "byte" } }, - "description": "Header defines the structure of a Tendermint block header." + "description": "Header defines the structure of a block header." }, "commit": { "type": "object", @@ -2535,7 +6007,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -2567,7 +6039,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -2612,7 +6084,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -2712,38 +6184,9 @@ "description": "Commit contains the evidence that a block was committed by a set of validators." } } - } - }, - "description": "GetBlockByHeightResponse is the response type for the Query/GetBlockByHeight RPC method." 
- }, - "cosmos.base.tendermint.v1beta1.GetLatestBlockResponse": { - "type": "object", - "properties": { - "block_id": { - "type": "object", - "properties": { - "hash": { - "type": "string", - "format": "byte" - }, - "part_set_header": { - "type": "object", - "properties": { - "total": { - "type": "integer", - "format": "int64" - }, - "hash": { - "type": "string", - "format": "byte" - } - }, - "title": "PartsetHeader" - } - }, - "title": "BlockID" }, - "block": { + "sdk_block": { + "title": "Since: cosmos-sdk 0.47", "type": "object", "properties": { "header": { @@ -2836,7 +6279,7 @@ }, "proposer_address": { "type": "string", - "format": "byte" + "description": "proposer_address is the original block proposer address, formatted as a Bech32 string.\nIn Tendermint, this type is `bytes`, but in the SDK, we convert it to a Bech32 string\nfor better UX." } }, "description": "Header defines the structure of a Tendermint block header." @@ -3113,7 +6556,7 @@ "format": "byte" } }, - "description": "Header defines the structure of a Tendermint block header." + "description": "Header defines the structure of a block header." 
}, "commit": { "type": "object", @@ -3211,7 +6654,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -3243,7 +6686,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -3288,7 +6731,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -3387,7 +6830,8 @@ }, "description": "Commit contains the evidence that a block was committed by a set of validators." } - } + }, + "description": "Block is tendermint type Block, with the Header proposer address\nfield converted to bech32 string." } }, "description": "GetLatestBlockResponse is the response type for the Query/GetLatestBlock RPC method." @@ -3420,7 +6864,7 @@ "description": "Must be a valid serialized protocol buffer of the above specified type." 
} }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" }, "voting_power": { "type": "string", @@ -3441,7 +6885,7 @@ "next_key": { "type": "string", "format": "byte", - "title": "next_key is the key to be passed to PageRequest.key to\nquery the next page most efficiently" + "description": "next_key is the key to be passed to PageRequest.key to\nquery the next page most efficiently. It will be empty if\nthere are no more results." }, "total": { "type": "string", @@ -3558,7 +7002,7 @@ "description": "VersionInfo is the type for the GetNodeInfoResponse message." } }, - "description": "GetNodeInfoResponse is the request type for the Query/GetNodeInfo RPC method." + "description": "GetNodeInfoResponse is the response type for the Query/GetNodeInfo RPC method." }, "cosmos.base.tendermint.v1beta1.GetSyncingResponse": { "type": "object", @@ -3597,7 +7041,7 @@ "description": "Must be a valid serialized protocol buffer of the above specified type." 
} }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" }, "voting_power": { "type": "string", @@ -3618,7 +7062,7 @@ "next_key": { "type": "string", "format": "byte", - "title": "next_key is the key to be passed to PageRequest.key to\nquery the next page most efficiently" + "description": "next_key is the key to be passed to PageRequest.key to\nquery the next page most efficiently. It will be empty if\nthere are no more results." }, "total": { "type": "string", @@ -3630,6 +7074,101 @@ }, "description": "GetValidatorSetByHeightResponse is the response type for the Query/GetValidatorSetByHeight RPC method." }, + "cosmos.base.tendermint.v1beta1.Header": { + "type": "object", + "properties": { + "version": { + "title": "basic block info", + "type": "object", + "properties": { + "block": { + "type": "string", + "format": "uint64" + }, + "app": { + "type": "string", + "format": "uint64" + } + }, + "description": "Consensus captures the consensus rules for processing a block in the blockchain,\nincluding all blockchain data structures and the rules of the application's\nstate transition machine." 
+ }, + "chain_id": { + "type": "string" + }, + "height": { + "type": "string", + "format": "int64" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "last_block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "last_commit_hash": { + "type": "string", + "format": "byte", + "title": "hashes of block data" + }, + "data_hash": { + "type": "string", + "format": "byte" + }, + "validators_hash": { + "type": "string", + "format": "byte", + "title": "hashes from the app output from the prev block" + }, + "next_validators_hash": { + "type": "string", + "format": "byte" + }, + "consensus_hash": { + "type": "string", + "format": "byte" + }, + "app_hash": { + "type": "string", + "format": "byte" + }, + "last_results_hash": { + "type": "string", + "format": "byte" + }, + "evidence_hash": { + "type": "string", + "format": "byte", + "title": "consensus info" + }, + "proposer_address": { + "type": "string", + "description": "proposer_address is the original block proposer address, formatted as a Bech32 string.\nIn Tendermint, this type is `bytes`, but in the SDK, we convert it to a Bech32 string\nfor better UX." + } + }, + "description": "Header defines the structure of a Tendermint block header." + }, "cosmos.base.tendermint.v1beta1.Module": { "type": "object", "properties": { @@ -3648,6 +7187,49 @@ }, "title": "Module is the type for VersionInfo" }, + "cosmos.base.tendermint.v1beta1.ProofOp": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "key": { + "type": "string", + "format": "byte" + }, + "data": { + "type": "string", + "format": "byte" + } + }, + "description": "ProofOp defines an operation used for calculating Merkle root. 
The data could\nbe arbitrary format, providing necessary data for example neighbouring node\nhash.\n\nNote: This type is a duplicate of the ProofOp proto type defined in Tendermint." + }, + "cosmos.base.tendermint.v1beta1.ProofOps": { + "type": "object", + "properties": { + "ops": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "key": { + "type": "string", + "format": "byte" + }, + "data": { + "type": "string", + "format": "byte" + } + }, + "description": "ProofOp defines an operation used for calculating Merkle root. The data could\nbe arbitrary format, providing necessary data for example neighbouring node\nhash.\n\nNote: This type is a duplicate of the ProofOp proto type defined in Tendermint." + } + } + }, + "description": "ProofOps is Merkle proof defined by the list of ProofOps.\n\nNote: This type is a duplicate of the ProofOps proto type defined in Tendermint." + }, "cosmos.base.tendermint.v1beta1.Validator": { "type": "object", "properties": { @@ -3667,7 +7249,7 @@ "description": "Must be a valid serialized protocol buffer of the above specified type." 
} }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" }, "voting_power": { "type": "string", @@ -3742,7 +7324,7 @@ "description": "Must be a valid serialized protocol buffer of the above specified type." } }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe 
JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname 
\"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" }, "grpc.gateway.runtime.Error": { "type": "object", @@ -3772,7 +7354,7 @@ "description": "Must be a valid serialized protocol buffer of the above specified type." } }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default 
use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by 
protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" } } } @@ -3789,7 +7371,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "tendermint.p2p.DefaultNodeInfo": { "type": "object", @@ -3967,7 +7549,7 @@ "format": "byte" } }, - "description": "Header defines the structure of a Tendermint block header." + "description": "Header defines the structure of a block header." }, "data": { "type": "object", @@ -4241,7 +7823,7 @@ "format": "byte" } }, - "description": "Header defines the structure of a Tendermint block header." + "description": "Header defines the structure of a block header." 
}, "commit": { "type": "object", @@ -4339,7 +7921,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -4371,7 +7953,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -4416,7 +7998,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -5066,7 +8648,7 @@ "format": "byte" } }, - "description": "Header defines the structure of a Tendermint block header." + "description": "Header defines the structure of a block header." }, "commit": { "type": "object", @@ -5164,7 +8746,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -5196,7 +8778,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -5241,7 +8823,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -5525,7 +9107,7 @@ "format": "byte" } }, - "description": "Header defines the structure of a Tendermint block header." + "description": "Header defines the structure of a block header." 
}, "commit": { "type": "object", @@ -5623,7 +9205,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -5655,7 +9237,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -5700,7 +9282,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -5822,7 +9404,7 @@ "format": "byte" } }, - "description": "Header defines the structure of a Tendermint block header." + "description": "Header defines the structure of a block header." }, "tendermint.types.LightBlock": { "type": "object", @@ -5923,7 +9505,7 @@ "format": "byte" } }, - "description": "Header defines the structure of a Tendermint block header." + "description": "Header defines the structure of a block header." }, "commit": { "type": "object", @@ -6021,7 +9603,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -6053,7 +9635,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -6175,7 +9757,7 @@ "format": "byte" } }, - "description": "Header defines the structure of a Tendermint block header." + "description": "Header defines the structure of a block header." 
}, "commit": { "type": "object", @@ -6273,7 +9855,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -6305,7 +9887,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -6350,7 +9932,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -6484,7 +10066,7 @@ "format": "byte" } }, - "description": "Header defines the structure of a Tendermint block header." + "description": "Header defines the structure of a block header." }, "commit": { "type": "object", @@ -6588,7 +10170,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -6624,7 +10206,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", @@ -6656,7 +10238,7 @@ "format": "byte" } }, - "title": "PublicKey defines the keys available for use with Tendermint Validators" + "title": "PublicKey defines the keys available for use with Validators" }, "voting_power": { "type": "string", diff --git a/docs/static/cosmos-tx-openapi.json b/docs/static/cosmos-tx-openapi.json index dbe78fb36..d0871922a 100644 --- a/docs/static/cosmos-tx-openapi.json +++ b/docs/static/cosmos-tx-openapi.json @@ -2,86 +2,98 @@ "swagger": "2.0", "info": { "title": 
"cosmos/tx/v1beta1/service.proto", - "version": "v0.44.4", + "version": "v0.47.8", "description": "A REST interface for tx service endpoints" }, "paths": { - "/cosmos/tx/v1beta1/simulate": { + "/cosmos/tx/v1beta1/decode": { "post": { - "summary": "Simulate simulates executing a transaction for estimating gas usage.", - "operationId": "Simulate", + "summary": "TxDecode decodes the transaction.", + "description": "Since: cosmos-sdk 0.47", + "operationId": "TxDecode", "responses": { "200": { "description": "A successful response.", + "schema": { + "$ref": "#/definitions/cosmos.tx.v1beta1.TxDecodeResponse" + } + }, + "default": { + "description": "An unexpected error response.", "schema": { "type": "object", "properties": { - "gas_info": { - "description": "gas_info is the information about gas used in the simulation.", - "type": "object", - "properties": { - "gas_wanted": { - "type": "string", - "format": "uint64", - "description": "GasWanted is the maximum units of work we allow this tx to perform." - }, - "gas_used": { - "type": "string", - "format": "uint64", - "description": "GasUsed is the amount of gas actually consumed." - } - } + "error": { + "type": "string" }, - "result": { - "description": "result is the result of the simulation.", - "type": "object", - "properties": { - "data": { - "type": "string", - "format": "byte", - "description": "Data is any data returned from message or handler execution. It MUST be\nlength prefixed in order to separate data from multiple message executions." - }, - "log": { - "type": "string", - "description": "Log contains the log information from message or handler execution." 
- }, - "events": { - "type": "array", - "items": { - "type": "object", - "properties": { - "type": { - "type": "string" - }, - "attributes": { - "type": "array", - "items": { - "type": "object", - "properties": { - "key": { - "type": "string", - "format": "byte" - }, - "value": { - "type": "string", - "format": "byte" - }, - "index": { - "type": "boolean" - } - }, - "description": "EventAttribute is a single key-value pair, associated with an event." - } - } - }, - "description": "Event allows application developers to attach additional information to\nResponseBeginBlock, ResponseEndBlock, ResponseCheckTx and ResponseDeliverTx.\nLater, transactions may be queried using these events." + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "details": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. 
(Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." }, - "description": "Events contains a slice of Event objects that were emitted during message\nor handler execution." - } + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." + } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with 
an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" } } + } + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "tx_bytes": { + "type": "string", + "format": "byte", + "description": "tx_bytes is the raw transaction." + } }, - "description": "SimulateResponse is the response type for the\nService.SimulateRPC method." + "description": "TxDecodeRequest is the request type for the Service.TxDecode\nRPC method.\n\nSince: cosmos-sdk 0.47" + } + } + ], + "tags": [ + "Service" + ] + } + }, + "/cosmos/tx/v1beta1/decode/amino": { + "post": { + "summary": "TxDecodeAmino decodes an Amino transaction from encoded bytes to JSON.", + "description": "Since: cosmos-sdk 0.47", + "operationId": "TxDecodeAmino", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "type": "object", + "properties": { + "amino_json": { + "type": "string" + } + }, + "description": "TxDecodeAminoResponse is the response type for the Service.TxDecodeAmino\nRPC method.\n\nSince: cosmos-sdk 0.47" } }, "default": { @@ -114,7 +126,7 @@ "description": "Must be a valid serialized protocol buffer of the above specified type." 
} }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" } } } @@ -127,7 +139,14 @@ "in": "body", "required": true, "schema": { - "$ref": "#/definitions/cosmos.tx.v1beta1.SimulateRequest" + "type": "object", + "properties": { + "amino_binary": { + "type": "string", + "format": "byte" + } + }, + "description": "TxDecodeAminoRequest is the request type for the Service.TxDecodeAmino\nRPC method.\n\nSince: cosmos-sdk 0.47" } } ], @@ -136,15 +155,24 @@ ] } }, - "/cosmos/tx/v1beta1/txs": { - "get": { - "summary": "GetTxsEvent fetches txs by event.", - "operationId": "GetTxsEvent", + "/cosmos/tx/v1beta1/encode": { + "post": { + "summary": "TxEncode encodes the transaction.", + "description": "Since: cosmos-sdk 0.47", + "operationId": "TxEncode", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/cosmos.tx.v1beta1.GetTxsEventResponse" + "type": "object", + "properties": { + "tx_bytes": { + "type": "string", + "format": "byte", + "description": "tx_bytes is the encoded transaction bytes." + } + }, + "description": "TxEncodeResponse is the response type for the\nService.TxEncode method.\n\nSince: cosmos-sdk 0.47" } }, "default": { @@ -177,7 +205,7 @@ "description": "Must be a valid serialized protocol buffer of the above specified type." 
} }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" } } } @@ -186,193 +214,189 @@ }, "parameters": [ { - "name": "events", - "description": "events is the list of transaction event type.", - "in": "query", - "required": false, - "type": "array", - "items": { - "type": "string" - }, - "collectionFormat": "multi" - }, - { - "name": "pagination.key", - "description": "key is a value returned in PageResponse.next_key to begin\nquerying the next page most efficiently. Only one of offset or key\nshould be set.", - "in": "query", - "required": false, - "type": "string", - "format": "byte" - }, - { - "name": "pagination.offset", - "description": "offset is a numeric offset that can be used when key is unavailable.\nIt is less efficient than using key. Only one of offset or key should\nbe set.", - "in": "query", - "required": false, - "type": "string", - "format": "uint64" - }, - { - "name": "pagination.limit", - "description": "limit is the total number of results to be returned in the result page.\nIf left empty it will default to a value to be set by each app.", - "in": "query", - "required": false, - "type": "string", - "format": "uint64" - }, - { - "name": "pagination.count_total", - "description": "count_total is set to true to indicate that the result set should include\na count of the total number of items available for pagination in UIs.\ncount_total is only respected when offset is used. 
It is ignored when key\nis set.", - "in": "query", - "required": false, - "type": "boolean" - }, - { - "name": "pagination.reverse", - "description": "reverse is set to true if results are to be returned in the descending order.\n\nSince: cosmos-sdk 0.43", - "in": "query", - "required": false, - "type": "boolean" - }, - { - "name": "order_by", - "description": " - ORDER_BY_UNSPECIFIED: ORDER_BY_UNSPECIFIED specifies an unknown sorting order. OrderBy defaults to ASC in this case.\n - ORDER_BY_ASC: ORDER_BY_ASC defines ascending order\n - ORDER_BY_DESC: ORDER_BY_DESC defines descending order", - "in": "query", - "required": false, - "type": "string", - "enum": [ - "ORDER_BY_UNSPECIFIED", - "ORDER_BY_ASC", - "ORDER_BY_DESC" - ], - "default": "ORDER_BY_UNSPECIFIED" + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/cosmos.tx.v1beta1.TxEncodeRequest" + } } ], "tags": [ "Service" ] - }, + } + }, + "/cosmos/tx/v1beta1/encode/amino": { "post": { - "summary": "BroadcastTx broadcast transaction.", - "operationId": "BroadcastTx", + "summary": "TxEncodeAmino encodes an Amino transaction from JSON to encoded bytes.", + "description": "Since: cosmos-sdk 0.47", + "operationId": "TxEncodeAmino", "responses": { "200": { "description": "A successful response.", "schema": { "type": "object", "properties": { - "tx_response": { - "type": "object", - "properties": { - "height": { - "type": "string", - "format": "int64", - "title": "The block height" + "amino_binary": { + "type": "string", + "format": "byte" + } + }, + "description": "TxEncodeAminoResponse is the response type for the Service.TxEncodeAmino\nRPC method.\n\nSince: cosmos-sdk 0.47" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "details": { + "type": "array", + "items": { 
+ "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." 
+ } }, - "txhash": { + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + } + } + } + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "amino_json": { + "type": "string" + } + }, + "description": "TxEncodeAminoRequest is the request type for the Service.TxEncodeAmino\nRPC method.\n\nSince: cosmos-sdk 0.47" + } + } + ], + "tags": [ + "Service" + ] + } + }, + "/cosmos/tx/v1beta1/simulate": { + "post": { + "summary": "Simulate simulates executing a transaction for estimating gas usage.", + "operationId": "Simulate", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "type": "object", + "properties": { + "gas_info": { + "description": "gas_info is the information about gas used in the simulation.", + "type": "object", + "properties": { + "gas_wanted": { "type": "string", - "description": "The transaction hash." + "format": "uint64", + "description": "GasWanted is the maximum units of work we allow this tx to perform." }, - "codespace": { + "gas_used": { "type": "string", - "title": "Namespace for the Code" - }, - "code": { - "type": "integer", - "format": "int64", - "description": "Response code." - }, + "format": "uint64", + "description": "GasUsed is the amount of gas actually consumed." + } + } + }, + "result": { + "description": "result is the result of the simulation.", + "type": "object", + "properties": { "data": { "type": "string", - "description": "Result bytes, if any." + "format": "byte", + "description": "Data is any data returned from message or handler execution. It MUST be\nlength prefixed in order to separate data from multiple message executions.\nDeprecated. This field is still populated, but prefer msg_response instead\nbecause it also contains the Msg response typeURL." 
}, - "raw_log": { + "log": { "type": "string", - "description": "The output of the application's logger (raw string). May be\nnon-deterministic." + "description": "Log contains the log information from message or handler execution." }, - "logs": { + "events": { "type": "array", "items": { "type": "object", "properties": { - "msg_index": { - "type": "integer", - "format": "int64" - }, - "log": { + "type": { "type": "string" }, - "events": { + "attributes": { "type": "array", "items": { "type": "object", "properties": { - "type": { + "key": { "type": "string" }, - "attributes": { - "type": "array", - "items": { - "type": "object", - "properties": { - "key": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "description": "Attribute defines an attribute wrapper where the key and value are\nstrings instead of raw bytes." - } + "value": { + "type": "string" + }, + "index": { + "type": "boolean" } }, - "description": "StringEvent defines en Event object wrapper where all the attributes\ncontain key/value pairs that are strings instead of raw bytes." - }, - "description": "Events contains a slice of Event objects that were emitted during some\nexecution." + "description": "EventAttribute is a single key-value pair, associated with an event." + } } }, - "description": "ABCIMessageLog defines a structure containing an indexed tx ABCI message log." + "description": "Event allows application developers to attach additional information to\nResponseBeginBlock, ResponseEndBlock, ResponseCheckTx and ResponseDeliverTx.\nLater, transactions may be queried using these events." }, - "description": "The output of the application's logger (typed). May be non-deterministic." - }, - "info": { - "type": "string", - "description": "Additional information. May be non-deterministic." - }, - "gas_wanted": { - "type": "string", - "format": "int64", - "description": "Amount of gas requested for transaction." 
- }, - "gas_used": { - "type": "string", - "format": "int64", - "description": "Amount of gas consumed by transaction." + "description": "Events contains a slice of Event objects that were emitted during message\nor handler execution." }, - "tx": { - "type": "object", - "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + "msg_responses": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. 
The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." + } }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." 
- } + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - }, - "timestamp": { - "type": "string", - "description": "Time of the previous block. For heights > 1, it's the weighted median of\nthe timestamps of the valid votes in the block.LastCommit. For height == 1,\nit's genesis time." + "description": "msg_responses contains the Msg handler responses type packed in Anys.\n\nSince: cosmos-sdk 0.46" } - }, - "description": "TxResponse defines a structure containing relevant tx data and metadata. The\ntags are stringified and the log is JSON decoded." + } } }, - "description": "BroadcastTxResponse is the response type for the\nService.BroadcastTx method." + "description": "SimulateResponse is the response type for the\nService.SimulateRPC method." } }, "default": { @@ -405,7 +429,7 @@ "description": "Must be a valid serialized protocol buffer of the above specified type." 
} }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" } } } @@ -418,26 +442,7 @@ "in": "body", "required": true, "schema": { - "type": "object", - "properties": { - "tx_bytes": { - "type": "string", - "format": "byte", - "description": "tx_bytes is the raw transaction." - }, - "mode": { - "type": "string", - "enum": [ - "BROADCAST_MODE_UNSPECIFIED", - "BROADCAST_MODE_BLOCK", - "BROADCAST_MODE_SYNC", - "BROADCAST_MODE_ASYNC" - ], - "default": "BROADCAST_MODE_UNSPECIFIED", - "description": "BroadcastMode specifies the broadcast mode for the TxService.Broadcast RPC method.\n\n - BROADCAST_MODE_UNSPECIFIED: zero-value for mode ordering\n - BROADCAST_MODE_BLOCK: BROADCAST_MODE_BLOCK defines a tx broadcasting mode where the client waits for\nthe tx to be committed in a block.\n - BROADCAST_MODE_SYNC: BROADCAST_MODE_SYNC defines a tx broadcasting mode where the client waits for\na CheckTx execution response only.\n - BROADCAST_MODE_ASYNC: BROADCAST_MODE_ASYNC defines a tx broadcasting mode where the client returns\nimmediately." - } - }, - "description": "BroadcastTxRequest is the request type for the Service.BroadcastTxRequest\nRPC method." 
+ "$ref": "#/definitions/cosmos.tx.v1beta1.SimulateRequest" } } ], @@ -446,15 +451,15 @@ ] } }, - "/cosmos/tx/v1beta1/txs/{hash}": { + "/cosmos/tx/v1beta1/txs": { "get": { - "summary": "GetTx fetches a tx by hash.", - "operationId": "GetTx", + "summary": "GetTxsEvent fetches txs by event.", + "operationId": "GetTxsEvent", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/cosmos.tx.v1beta1.GetTxResponse" + "$ref": "#/definitions/cosmos.tx.v1beta1.GetTxsEventResponse" } }, "default": { @@ -487,7 +492,7 @@ "description": "Must be a valid serialized protocol buffer of the above specified type." } }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of 
the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the 
regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" } } } @@ -496,1305 +501,5656 @@ }, "parameters": [ { - "name": "hash", - "description": "hash is the tx hash to query, encoded as a hex string.", - "in": "path", - "required": true, - "type": "string" + "name": "events", + "description": "events is the list of transaction event type.", + "in": "query", + "required": false, + "type": "array", + "items": { + "type": "string" + }, + "collectionFormat": "multi" + }, + { + "name": "pagination.key", + "description": "key is a value returned in PageResponse.next_key to begin\nquerying the next page most efficiently. Only one of offset or key\nshould be set.", + "in": "query", + "required": false, + "type": "string", + "format": "byte" + }, + { + "name": "pagination.offset", + "description": "offset is a numeric offset that can be used when key is unavailable.\nIt is less efficient than using key. 
Only one of offset or key should\nbe set.", + "in": "query", + "required": false, + "type": "string", + "format": "uint64" + }, + { + "name": "pagination.limit", + "description": "limit is the total number of results to be returned in the result page.\nIf left empty it will default to a value to be set by each app.", + "in": "query", + "required": false, + "type": "string", + "format": "uint64" + }, + { + "name": "pagination.count_total", + "description": "count_total is set to true to indicate that the result set should include\na count of the total number of items available for pagination in UIs.\ncount_total is only respected when offset is used. It is ignored when key\nis set.", + "in": "query", + "required": false, + "type": "boolean" + }, + { + "name": "pagination.reverse", + "description": "reverse is set to true if results are to be returned in the descending order.\n\nSince: cosmos-sdk 0.43", + "in": "query", + "required": false, + "type": "boolean" + }, + { + "name": "order_by", + "description": " - ORDER_BY_UNSPECIFIED: ORDER_BY_UNSPECIFIED specifies an unknown sorting order. OrderBy defaults to ASC in this case.\n - ORDER_BY_ASC: ORDER_BY_ASC defines ascending order\n - ORDER_BY_DESC: ORDER_BY_DESC defines descending order", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "ORDER_BY_UNSPECIFIED", + "ORDER_BY_ASC", + "ORDER_BY_DESC" + ], + "default": "ORDER_BY_UNSPECIFIED" + }, + { + "name": "page", + "description": "page is the page number to query, starts at 1. 
If not provided, will default to first page.", + "in": "query", + "required": false, + "type": "string", + "format": "uint64" + }, + { + "name": "limit", + "description": "limit is the total number of results to be returned in the result page.\nIf left empty it will default to a value to be set by each app.", + "in": "query", + "required": false, + "type": "string", + "format": "uint64" } ], "tags": [ "Service" ] - } + }, + "post": { + "summary": "BroadcastTx broadcast transaction.", + "operationId": "BroadcastTx", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "type": "object", + "properties": { + "tx_response": { + "type": "object", + "properties": { + "height": { + "type": "string", + "format": "int64", + "title": "The block height" + }, + "txhash": { + "type": "string", + "description": "The transaction hash." + }, + "codespace": { + "type": "string", + "title": "Namespace for the Code" + }, + "code": { + "type": "integer", + "format": "int64", + "description": "Response code." + }, + "data": { + "type": "string", + "description": "Result bytes, if any." + }, + "raw_log": { + "type": "string", + "description": "The output of the application's logger (raw string). May be\nnon-deterministic." + }, + "logs": { + "type": "array", + "items": { + "type": "object", + "properties": { + "msg_index": { + "type": "integer", + "format": "int64" + }, + "log": { + "type": "string" + }, + "events": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "attributes": { + "type": "array", + "items": { + "type": "object", + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "description": "Attribute defines an attribute wrapper where the key and value are\nstrings instead of raw bytes." 
+ } + } + }, + "description": "StringEvent defines en Event object wrapper where all the attributes\ncontain key/value pairs that are strings instead of raw bytes." + }, + "description": "Events contains a slice of Event objects that were emitted during some\nexecution." + } + }, + "description": "ABCIMessageLog defines a structure containing an indexed tx ABCI message log." + }, + "description": "The output of the application's logger (typed). May be non-deterministic." + }, + "info": { + "type": "string", + "description": "Additional information. May be non-deterministic." + }, + "gas_wanted": { + "type": "string", + "format": "int64", + "description": "Amount of gas requested for transaction." + }, + "gas_used": { + "type": "string", + "format": "int64", + "description": "Amount of gas consumed by transaction." + }, + "tx": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. 
(Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." + } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + }, + "timestamp": { + "type": "string", + "description": "Time of the previous block. For heights > 1, it's the weighted median of\nthe timestamps of the valid votes in the block.LastCommit. For height == 1,\nit's genesis time." + }, + "events": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "attributes": { + "type": "array", + "items": { + "type": "object", + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + }, + "index": { + "type": "boolean" + } + }, + "description": "EventAttribute is a single key-value pair, associated with an event." + } + } + }, + "description": "Event allows application developers to attach additional information to\nResponseBeginBlock, ResponseEndBlock, ResponseCheckTx and ResponseDeliverTx.\nLater, transactions may be queried using these events." + }, + "description": "Events defines all the events emitted by processing a transaction. Note,\nthese events include those emitted by processing all the messages and those\nemitted from the ante. Whereas Logs contains the events, with\nadditional metadata, emitted only by processing the messages.\n\nSince: cosmos-sdk 0.42.11, 0.44.5, 0.45" + } + }, + "description": "TxResponse defines a structure containing relevant tx data and metadata. The\ntags are stringified and the log is JSON decoded." 
+ } + }, + "description": "BroadcastTxResponse is the response type for the\nService.BroadcastTx method." + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "details": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
+ }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." + } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + } + } + } + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "tx_bytes": { + "type": "string", + "format": "byte", + "description": "tx_bytes is the raw transaction." + }, + "mode": { + "type": "string", + "enum": [ + "BROADCAST_MODE_UNSPECIFIED", + "BROADCAST_MODE_BLOCK", + "BROADCAST_MODE_SYNC", + "BROADCAST_MODE_ASYNC" + ], + "default": "BROADCAST_MODE_UNSPECIFIED", + "description": "BroadcastMode specifies the broadcast mode for the TxService.Broadcast RPC method.\n\n - BROADCAST_MODE_UNSPECIFIED: zero-value for mode ordering\n - BROADCAST_MODE_BLOCK: DEPRECATED: use BROADCAST_MODE_SYNC instead,\nBROADCAST_MODE_BLOCK is not supported by the SDK from v0.47.x onwards.\n - BROADCAST_MODE_SYNC: BROADCAST_MODE_SYNC defines a tx broadcasting mode where the client waits for\na CheckTx execution response only.\n - BROADCAST_MODE_ASYNC: BROADCAST_MODE_ASYNC defines a tx broadcasting mode where the client returns\nimmediately." + } + }, + "description": "BroadcastTxRequest is the request type for the Service.BroadcastTxRequest\nRPC method." 
+ } + } + ], + "tags": [ + "Service" + ] + } + }, + "/cosmos/tx/v1beta1/txs/block/{height}": { + "get": { + "summary": "GetBlockWithTxs fetches a block with decoded txs.", + "description": "Since: cosmos-sdk 0.45.2", + "operationId": "GetBlockWithTxs", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/cosmos.tx.v1beta1.GetBlockWithTxsResponse" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "details": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. 
(Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." + } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + } + } + } + } + } + }, + "parameters": [ + { + "name": "height", + "description": "height is the height of the block to query.", + "in": "path", + "required": true, + "type": "string", + "format": "int64" + }, + { + "name": "pagination.key", + "description": "key is a value returned in PageResponse.next_key to begin\nquerying the next page most efficiently. Only one of offset or key\nshould be set.", + "in": "query", + "required": false, + "type": "string", + "format": "byte" + }, + { + "name": "pagination.offset", + "description": "offset is a numeric offset that can be used when key is unavailable.\nIt is less efficient than using key. Only one of offset or key should\nbe set.", + "in": "query", + "required": false, + "type": "string", + "format": "uint64" + }, + { + "name": "pagination.limit", + "description": "limit is the total number of results to be returned in the result page.\nIf left empty it will default to a value to be set by each app.", + "in": "query", + "required": false, + "type": "string", + "format": "uint64" + }, + { + "name": "pagination.count_total", + "description": "count_total is set to true to indicate that the result set should include\na count of the total number of items available for pagination in UIs.\ncount_total is only respected when offset is used. 
It is ignored when key\nis set.", + "in": "query", + "required": false, + "type": "boolean" + }, + { + "name": "pagination.reverse", + "description": "reverse is set to true if results are to be returned in the descending order.\n\nSince: cosmos-sdk 0.43", + "in": "query", + "required": false, + "type": "boolean" + } + ], + "tags": [ + "Service" + ] + } + }, + "/cosmos/tx/v1beta1/txs/{hash}": { + "get": { + "summary": "GetTx fetches a tx by hash.", + "operationId": "GetTx", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/cosmos.tx.v1beta1.GetTxResponse" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "details": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. 
Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." + } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which 
contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + } + } + } + } + } + }, + "parameters": [ + { + "name": "hash", + "description": "hash is the tx hash to query, encoded as a hex string.", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "Service" + ] + } } }, "definitions": { "cosmos.base.abci.v1beta1.ABCIMessageLog": { "type": "object", "properties": { - "msg_index": { - "type": "integer", - "format": "int64" - }, - "log": { + "msg_index": { + "type": "integer", + "format": "int64" + }, + "log": { + "type": "string" + }, + "events": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "attributes": { + "type": "array", + "items": { + "type": "object", + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "description": "Attribute defines an attribute wrapper where the key and value are\nstrings instead of raw bytes." + } + } + }, + "description": "StringEvent defines en Event object wrapper where all the attributes\ncontain key/value pairs that are strings instead of raw bytes." + }, + "description": "Events contains a slice of Event objects that were emitted during some\nexecution." + } + }, + "description": "ABCIMessageLog defines a structure containing an indexed tx ABCI message log." 
+ }, + "cosmos.base.abci.v1beta1.Attribute": { + "type": "object", + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "description": "Attribute defines an attribute wrapper where the key and value are\nstrings instead of raw bytes." + }, + "cosmos.base.abci.v1beta1.GasInfo": { + "type": "object", + "properties": { + "gas_wanted": { + "type": "string", + "format": "uint64", + "description": "GasWanted is the maximum units of work we allow this tx to perform." + }, + "gas_used": { + "type": "string", + "format": "uint64", + "description": "GasUsed is the amount of gas actually consumed." + } + }, + "description": "GasInfo defines tx execution gas context." + }, + "cosmos.base.abci.v1beta1.Result": { + "type": "object", + "properties": { + "data": { + "type": "string", + "format": "byte", + "description": "Data is any data returned from message or handler execution. It MUST be\nlength prefixed in order to separate data from multiple message executions.\nDeprecated. This field is still populated, but prefer msg_response instead\nbecause it also contains the Msg response typeURL." + }, + "log": { + "type": "string", + "description": "Log contains the log information from message or handler execution." + }, + "events": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "attributes": { + "type": "array", + "items": { + "type": "object", + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + }, + "index": { + "type": "boolean" + } + }, + "description": "EventAttribute is a single key-value pair, associated with an event." + } + } + }, + "description": "Event allows application developers to attach additional information to\nResponseBeginBlock, ResponseEndBlock, ResponseCheckTx and ResponseDeliverTx.\nLater, transactions may be queried using these events." 
+ }, + "description": "Events contains a slice of Event objects that were emitted during message\nor handler execution." + }, + "msg_responses": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." 
+ } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + }, + "description": "msg_responses contains the Msg handler responses type packed in Anys.\n\nSince: cosmos-sdk 0.46" + } + }, + "description": "Result is the union of ResponseFormat and ResponseCheckTx." + }, + "cosmos.base.abci.v1beta1.StringEvent": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "attributes": { + "type": "array", + "items": { + "type": "object", + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "description": "Attribute defines an attribute wrapper where the key and value are\nstrings instead of raw bytes." + } + } + }, + "description": "StringEvent defines en Event object wrapper where all the attributes\ncontain key/value pairs that are strings instead of raw bytes." + }, + "cosmos.base.abci.v1beta1.TxResponse": { + "type": "object", + "properties": { + "height": { + "type": "string", + "format": "int64", + "title": "The block height" + }, + "txhash": { + "type": "string", + "description": "The transaction hash." + }, + "codespace": { + "type": "string", + "title": "Namespace for the Code" + }, + "code": { + "type": "integer", + "format": "int64", + "description": "Response code." + }, + "data": { + "type": "string", + "description": "Result bytes, if any." + }, + "raw_log": { + "type": "string", + "description": "The output of the application's logger (raw string). May be\nnon-deterministic." 
+ }, + "logs": { + "type": "array", + "items": { + "type": "object", + "properties": { + "msg_index": { + "type": "integer", + "format": "int64" + }, + "log": { + "type": "string" + }, + "events": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "attributes": { + "type": "array", + "items": { + "type": "object", + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "description": "Attribute defines an attribute wrapper where the key and value are\nstrings instead of raw bytes." + } + } + }, + "description": "StringEvent defines en Event object wrapper where all the attributes\ncontain key/value pairs that are strings instead of raw bytes." + }, + "description": "Events contains a slice of Event objects that were emitted during some\nexecution." + } + }, + "description": "ABCIMessageLog defines a structure containing an indexed tx ABCI message log." + }, + "description": "The output of the application's logger (typed). May be non-deterministic." + }, + "info": { + "type": "string", + "description": "Additional information. May be non-deterministic." + }, + "gas_wanted": { + "type": "string", + "format": "int64", + "description": "Amount of gas requested for transaction." + }, + "gas_used": { + "type": "string", + "format": "int64", + "description": "Amount of gas consumed by transaction." + }, + "tx": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. 
However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." 
+ } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + }, + "timestamp": { + "type": "string", + "description": "Time of the previous block. For heights > 1, it's the weighted median of\nthe timestamps of the valid votes in the block.LastCommit. For height == 1,\nit's genesis time." + }, + "events": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "attributes": { + "type": "array", + "items": { + "type": "object", + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + }, + "index": { + "type": "boolean" + } + }, + "description": "EventAttribute is a single key-value pair, associated with an event." + } + } + }, + "description": "Event allows application developers to attach additional information to\nResponseBeginBlock, ResponseEndBlock, ResponseCheckTx and ResponseDeliverTx.\nLater, transactions may be queried using these events." + }, + "description": "Events defines all the events emitted by processing a transaction. Note,\nthese events include those emitted by processing all the messages and those\nemitted from the ante. Whereas Logs contains the events, with\nadditional metadata, emitted only by processing the messages.\n\nSince: cosmos-sdk 0.42.11, 0.44.5, 0.45" + } + }, + "description": "TxResponse defines a structure containing relevant tx data and metadata. The\ntags are stringified and the log is JSON decoded." + }, + "cosmos.base.query.v1beta1.PageRequest": { + "type": "object", + "properties": { + "key": { + "type": "string", + "format": "byte", + "description": "key is a value returned in PageResponse.next_key to begin\nquerying the next page most efficiently. Only one of offset or key\nshould be set." 
+ }, + "offset": { + "type": "string", + "format": "uint64", + "description": "offset is a numeric offset that can be used when key is unavailable.\nIt is less efficient than using key. Only one of offset or key should\nbe set." + }, + "limit": { + "type": "string", + "format": "uint64", + "description": "limit is the total number of results to be returned in the result page.\nIf left empty it will default to a value to be set by each app." + }, + "count_total": { + "type": "boolean", + "description": "count_total is set to true to indicate that the result set should include\na count of the total number of items available for pagination in UIs.\ncount_total is only respected when offset is used. It is ignored when key\nis set." + }, + "reverse": { + "type": "boolean", + "description": "reverse is set to true if results are to be returned in the descending order.\n\nSince: cosmos-sdk 0.43" + } + }, + "description": "message SomeRequest {\n Foo some_parameter = 1;\n PageRequest pagination = 2;\n }", + "title": "PageRequest is to be embedded in gRPC request messages for efficient\npagination. Ex:" + }, + "cosmos.base.query.v1beta1.PageResponse": { + "type": "object", + "properties": { + "next_key": { + "type": "string", + "format": "byte", + "description": "next_key is the key to be passed to PageRequest.key to\nquery the next page most efficiently. It will be empty if\nthere are no more results." 
+ }, + "total": { + "type": "string", + "format": "uint64", + "title": "total is total number of results available if PageRequest.count_total\nwas set, its value is undefined otherwise" + } + }, + "description": "PageResponse is to be embedded in gRPC response messages where the\ncorresponding request message has used PageRequest.\n\n message SomeResponse {\n repeated Bar results = 1;\n PageResponse page = 2;\n }" + }, + "cosmos.base.v1beta1.Coin": { + "type": "object", + "properties": { + "denom": { + "type": "string" + }, + "amount": { + "type": "string" + } + }, + "description": "Coin defines a token with a denomination and an amount.\n\nNOTE: The amount field is an Int which implements the custom method\nsignatures required by gogoproto." + }, + "cosmos.crypto.multisig.v1beta1.CompactBitArray": { + "type": "object", + "properties": { + "extra_bits_stored": { + "type": "integer", + "format": "int64" + }, + "elems": { + "type": "string", + "format": "byte" + } + }, + "description": "CompactBitArray is an implementation of a space efficient bit array.\nThis is used to ensure that the encoded data takes up a minimal amount of\nspace after proto encoding.\nThis is not thread safe, and is not intended for concurrent usage." + }, + "cosmos.tx.signing.v1beta1.SignMode": { + "type": "string", + "enum": [ + "SIGN_MODE_UNSPECIFIED", + "SIGN_MODE_DIRECT", + "SIGN_MODE_TEXTUAL", + "SIGN_MODE_DIRECT_AUX", + "SIGN_MODE_LEGACY_AMINO_JSON", + "SIGN_MODE_EIP_191" + ], + "default": "SIGN_MODE_UNSPECIFIED", + "description": "SignMode represents a signing mode with its own security guarantees.\n\nThis enum should be considered a registry of all known sign modes\nin the Cosmos ecosystem. Apps are not expected to support all known\nsign modes. 
Apps that would like to support custom sign modes are\nencouraged to open a small PR against this file to add a new case\nto this SignMode enum describing their sign mode so that different\napps have a consistent version of this enum.\n\n - SIGN_MODE_UNSPECIFIED: SIGN_MODE_UNSPECIFIED specifies an unknown signing mode and will be\nrejected.\n - SIGN_MODE_DIRECT: SIGN_MODE_DIRECT specifies a signing mode which uses SignDoc and is\nverified with raw bytes from Tx.\n - SIGN_MODE_TEXTUAL: SIGN_MODE_TEXTUAL is a future signing mode that will verify some\nhuman-readable textual representation on top of the binary representation\nfrom SIGN_MODE_DIRECT. It is currently not supported.\n - SIGN_MODE_DIRECT_AUX: SIGN_MODE_DIRECT_AUX specifies a signing mode which uses\nSignDocDirectAux. As opposed to SIGN_MODE_DIRECT, this sign mode does not\nrequire signers signing over other signers' `signer_info`. It also allows\nfor adding Tips in transactions.\n\nSince: cosmos-sdk 0.46\n - SIGN_MODE_LEGACY_AMINO_JSON: SIGN_MODE_LEGACY_AMINO_JSON is a backwards compatibility mode which uses\nAmino JSON and will be removed in the future.\n - SIGN_MODE_EIP_191: SIGN_MODE_EIP_191 specifies the sign mode for EIP 191 signing on the Cosmos\nSDK. Ref: https://eips.ethereum.org/EIPS/eip-191\n\nCurrently, SIGN_MODE_EIP_191 is registered as a SignMode enum variant,\nbut is not implemented on the SDK by default. To enable EIP-191, you need\nto pass a custom `TxConfig` that has an implementation of\n`SignModeHandler` for EIP-191. The SDK may decide to fully support\nEIP-191 in the future.\n\nSince: cosmos-sdk 0.45.2" + }, + "cosmos.tx.v1beta1.AuthInfo": { + "type": "object", + "properties": { + "signer_infos": { + "type": "array", + "items": { + "$ref": "#/definitions/cosmos.tx.v1beta1.SignerInfo" + }, + "description": "signer_infos defines the signing modes for the required signers. The number\nand order of elements must match the required signers from TxBody's\nmessages. 
The first element is the primary signer and the one which pays\nthe fee." + }, + "fee": { + "description": "Fee is the fee and gas limit for the transaction. The first signer is the\nprimary signer and the one which pays the fee. The fee can be calculated\nbased on the cost of evaluating the body and doing signature verification\nof the signers. This can be estimated via simulation.", + "type": "object", + "properties": { + "amount": { + "type": "array", + "items": { + "type": "object", + "properties": { + "denom": { + "type": "string" + }, + "amount": { + "type": "string" + } + }, + "description": "Coin defines a token with a denomination and an amount.\n\nNOTE: The amount field is an Int which implements the custom method\nsignatures required by gogoproto." + }, + "title": "amount is the amount of coins to be paid as a fee" + }, + "gas_limit": { + "type": "string", + "format": "uint64", + "title": "gas_limit is the maximum gas that can be used in transaction processing\nbefore an out of gas error occurs" + }, + "payer": { + "type": "string", + "description": "if unset, the first signer is responsible for paying the fees. If set, the specified account must pay the fees.\nthe payer must be a tx signer (and thus have signed this field in AuthInfo).\nsetting this field does *not* change the ordering of required signers for the transaction." + }, + "granter": { + "type": "string", + "title": "if set, the fee payer (either the first signer or the value of the payer field) requests that a fee grant be used\nto pay fees instead of the fee payer's own balance. If an appropriate fee grant does not exist or the chain does\nnot support fee grants, this will fail" + } + } + }, + "tip": { + "description": "Tip is the optional tip used for transactions fees paid in another denom.\n\nThis field is ignored if the chain didn't enable tips, i.e. 
didn't add the\n`TipDecorator` in its posthandler.\n\nSince: cosmos-sdk 0.46", + "type": "object", + "properties": { + "amount": { + "type": "array", + "items": { + "type": "object", + "properties": { + "denom": { + "type": "string" + }, + "amount": { + "type": "string" + } + }, + "description": "Coin defines a token with a denomination and an amount.\n\nNOTE: The amount field is an Int which implements the custom method\nsignatures required by gogoproto." + }, + "title": "amount is the amount of the tip" + }, + "tipper": { + "type": "string", + "title": "tipper is the address of the account paying for the tip" + } + } + } + }, + "description": "AuthInfo describes the fee and signer modes that are used to sign a\ntransaction." + }, + "cosmos.tx.v1beta1.BroadcastMode": { + "type": "string", + "enum": [ + "BROADCAST_MODE_UNSPECIFIED", + "BROADCAST_MODE_BLOCK", + "BROADCAST_MODE_SYNC", + "BROADCAST_MODE_ASYNC" + ], + "default": "BROADCAST_MODE_UNSPECIFIED", + "description": "BroadcastMode specifies the broadcast mode for the TxService.Broadcast RPC method.\n\n - BROADCAST_MODE_UNSPECIFIED: zero-value for mode ordering\n - BROADCAST_MODE_BLOCK: DEPRECATED: use BROADCAST_MODE_SYNC instead,\nBROADCAST_MODE_BLOCK is not supported by the SDK from v0.47.x onwards.\n - BROADCAST_MODE_SYNC: BROADCAST_MODE_SYNC defines a tx broadcasting mode where the client waits for\na CheckTx execution response only.\n - BROADCAST_MODE_ASYNC: BROADCAST_MODE_ASYNC defines a tx broadcasting mode where the client returns\nimmediately." + }, + "cosmos.tx.v1beta1.BroadcastTxRequest": { + "type": "object", + "properties": { + "tx_bytes": { + "type": "string", + "format": "byte", + "description": "tx_bytes is the raw transaction." 
+ }, + "mode": { + "type": "string", + "enum": [ + "BROADCAST_MODE_UNSPECIFIED", + "BROADCAST_MODE_BLOCK", + "BROADCAST_MODE_SYNC", + "BROADCAST_MODE_ASYNC" + ], + "default": "BROADCAST_MODE_UNSPECIFIED", + "description": "BroadcastMode specifies the broadcast mode for the TxService.Broadcast RPC method.\n\n - BROADCAST_MODE_UNSPECIFIED: zero-value for mode ordering\n - BROADCAST_MODE_BLOCK: DEPRECATED: use BROADCAST_MODE_SYNC instead,\nBROADCAST_MODE_BLOCK is not supported by the SDK from v0.47.x onwards.\n - BROADCAST_MODE_SYNC: BROADCAST_MODE_SYNC defines a tx broadcasting mode where the client waits for\na CheckTx execution response only.\n - BROADCAST_MODE_ASYNC: BROADCAST_MODE_ASYNC defines a tx broadcasting mode where the client returns\nimmediately." + } + }, + "description": "BroadcastTxRequest is the request type for the Service.BroadcastTxRequest\nRPC method." + }, + "cosmos.tx.v1beta1.BroadcastTxResponse": { + "type": "object", + "properties": { + "tx_response": { + "type": "object", + "properties": { + "height": { + "type": "string", + "format": "int64", + "title": "The block height" + }, + "txhash": { + "type": "string", + "description": "The transaction hash." + }, + "codespace": { + "type": "string", + "title": "Namespace for the Code" + }, + "code": { + "type": "integer", + "format": "int64", + "description": "Response code." + }, + "data": { + "type": "string", + "description": "Result bytes, if any." + }, + "raw_log": { + "type": "string", + "description": "The output of the application's logger (raw string). May be\nnon-deterministic." 
+ }, + "logs": { + "type": "array", + "items": { + "type": "object", + "properties": { + "msg_index": { + "type": "integer", + "format": "int64" + }, + "log": { + "type": "string" + }, + "events": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "attributes": { + "type": "array", + "items": { + "type": "object", + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "description": "Attribute defines an attribute wrapper where the key and value are\nstrings instead of raw bytes." + } + } + }, + "description": "StringEvent defines en Event object wrapper where all the attributes\ncontain key/value pairs that are strings instead of raw bytes." + }, + "description": "Events contains a slice of Event objects that were emitted during some\nexecution." + } + }, + "description": "ABCIMessageLog defines a structure containing an indexed tx ABCI message log." + }, + "description": "The output of the application's logger (typed). May be non-deterministic." + }, + "info": { + "type": "string", + "description": "Additional information. May be non-deterministic." + }, + "gas_wanted": { + "type": "string", + "format": "int64", + "description": "Amount of gas requested for transaction." + }, + "gas_used": { + "type": "string", + "format": "int64", + "description": "Amount of gas consumed by transaction." + }, + "tx": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. 
However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." 
+ } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + }, + "timestamp": { + "type": "string", + "description": "Time of the previous block. For heights > 1, it's the weighted median of\nthe timestamps of the valid votes in the block.LastCommit. For height == 1,\nit's genesis time." + }, + "events": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "attributes": { + "type": "array", + "items": { + "type": "object", + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + }, + "index": { + "type": "boolean" + } + }, + "description": "EventAttribute is a single key-value pair, associated with an event." + } + } + }, + "description": "Event allows application developers to attach additional information to\nResponseBeginBlock, ResponseEndBlock, ResponseCheckTx and ResponseDeliverTx.\nLater, transactions may be queried using these events." + }, + "description": "Events defines all the events emitted by processing a transaction. Note,\nthese events include those emitted by processing all the messages and those\nemitted from the ante. Whereas Logs contains the events, with\nadditional metadata, emitted only by processing the messages.\n\nSince: cosmos-sdk 0.42.11, 0.44.5, 0.45" + } + }, + "description": "TxResponse defines a structure containing relevant tx data and metadata. The\ntags are stringified and the log is JSON decoded." + } + }, + "description": "BroadcastTxResponse is the response type for the\nService.BroadcastTx method." 
+ }, + "cosmos.tx.v1beta1.Fee": { + "type": "object", + "properties": { + "amount": { + "type": "array", + "items": { + "type": "object", + "properties": { + "denom": { + "type": "string" + }, + "amount": { + "type": "string" + } + }, + "description": "Coin defines a token with a denomination and an amount.\n\nNOTE: The amount field is an Int which implements the custom method\nsignatures required by gogoproto." + }, + "title": "amount is the amount of coins to be paid as a fee" + }, + "gas_limit": { + "type": "string", + "format": "uint64", + "title": "gas_limit is the maximum gas that can be used in transaction processing\nbefore an out of gas error occurs" + }, + "payer": { + "type": "string", + "description": "if unset, the first signer is responsible for paying the fees. If set, the specified account must pay the fees.\nthe payer must be a tx signer (and thus have signed this field in AuthInfo).\nsetting this field does *not* change the ordering of required signers for the transaction." + }, + "granter": { + "type": "string", + "title": "if set, the fee payer (either the first signer or the value of the payer field) requests that a fee grant be used\nto pay fees instead of the fee payer's own balance. If an appropriate fee grant does not exist or the chain does\nnot support fee grants, this will fail" + } + }, + "description": "Fee includes the amount of coins paid in fees and the maximum\ngas to be used by the transaction. The ratio yields an effective \"gasprice\",\nwhich must be above some miminum to be accepted into the mempool." + }, + "cosmos.tx.v1beta1.GetBlockWithTxsResponse": { + "type": "object", + "properties": { + "txs": { + "type": "array", + "items": { + "$ref": "#/definitions/cosmos.tx.v1beta1.Tx" + }, + "description": "txs are the transactions in the block." 
+ }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "block": { + "type": "object", + "properties": { + "header": { + "type": "object", + "properties": { + "version": { + "title": "basic block info", + "type": "object", + "properties": { + "block": { + "type": "string", + "format": "uint64" + }, + "app": { + "type": "string", + "format": "uint64" + } + }, + "description": "Consensus captures the consensus rules for processing a block in the blockchain,\nincluding all blockchain data structures and the rules of the application's\nstate transition machine." + }, + "chain_id": { + "type": "string" + }, + "height": { + "type": "string", + "format": "int64" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "last_block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "last_commit_hash": { + "type": "string", + "format": "byte", + "title": "hashes of block data" + }, + "data_hash": { + "type": "string", + "format": "byte" + }, + "validators_hash": { + "type": "string", + "format": "byte", + "title": "hashes from the app output from the prev block" + }, + "next_validators_hash": { + "type": "string", + "format": "byte" + }, + "consensus_hash": { + "type": "string", + "format": "byte" + }, + "app_hash": { + "type": "string", + "format": "byte" + }, + "last_results_hash": { + "type": "string", + "format": "byte" + }, + "evidence_hash": { + "type": "string", + 
"format": "byte", + "title": "consensus info" + }, + "proposer_address": { + "type": "string", + "format": "byte" + } + }, + "description": "Header defines the structure of a block header." + }, + "data": { + "type": "object", + "properties": { + "txs": { + "type": "array", + "items": { + "type": "string", + "format": "byte" + }, + "description": "Txs that will be applied by state @ block.Height+1.\nNOTE: not all txs here are valid. We're just agreeing on the order first.\nThis means that block.AppHash does not include these txs." + } + }, + "title": "Data contains the set of transactions included in the block" + }, + "evidence": { + "type": "object", + "properties": { + "evidence": { + "type": "array", + "items": { + "type": "object", + "properties": { + "duplicate_vote_evidence": { + "type": "object", + "properties": { + "vote_a": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "SIGNED_MSG_TYPE_UNKNOWN", + "SIGNED_MSG_TYPE_PREVOTE", + "SIGNED_MSG_TYPE_PRECOMMIT", + "SIGNED_MSG_TYPE_PROPOSAL" + ], + "default": "SIGNED_MSG_TYPE_UNKNOWN", + "description": "SignedMsgType is a type of signed message in the consensus.\n\n - SIGNED_MSG_TYPE_PREVOTE: Votes\n - SIGNED_MSG_TYPE_PROPOSAL: Proposals" + }, + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "validator_index": { + "type": "integer", + "format": "int32" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + 
"description": "Vote represents a prevote, precommit, or commit vote from validators for\nconsensus." + }, + "vote_b": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "SIGNED_MSG_TYPE_UNKNOWN", + "SIGNED_MSG_TYPE_PREVOTE", + "SIGNED_MSG_TYPE_PRECOMMIT", + "SIGNED_MSG_TYPE_PROPOSAL" + ], + "default": "SIGNED_MSG_TYPE_UNKNOWN", + "description": "SignedMsgType is a type of signed message in the consensus.\n\n - SIGNED_MSG_TYPE_PREVOTE: Votes\n - SIGNED_MSG_TYPE_PROPOSAL: Proposals" + }, + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "validator_index": { + "type": "integer", + "format": "int32" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "Vote represents a prevote, precommit, or commit vote from validators for\nconsensus." + }, + "total_voting_power": { + "type": "string", + "format": "int64" + }, + "validator_power": { + "type": "string", + "format": "int64" + }, + "timestamp": { + "type": "string", + "format": "date-time" + } + }, + "description": "DuplicateVoteEvidence contains evidence of a validator signed two conflicting votes." 
+ }, + "light_client_attack_evidence": { + "type": "object", + "properties": { + "conflicting_block": { + "type": "object", + "properties": { + "signed_header": { + "type": "object", + "properties": { + "header": { + "type": "object", + "properties": { + "version": { + "title": "basic block info", + "type": "object", + "properties": { + "block": { + "type": "string", + "format": "uint64" + }, + "app": { + "type": "string", + "format": "uint64" + } + }, + "description": "Consensus captures the consensus rules for processing a block in the blockchain,\nincluding all blockchain data structures and the rules of the application's\nstate transition machine." + }, + "chain_id": { + "type": "string" + }, + "height": { + "type": "string", + "format": "int64" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "last_block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "last_commit_hash": { + "type": "string", + "format": "byte", + "title": "hashes of block data" + }, + "data_hash": { + "type": "string", + "format": "byte" + }, + "validators_hash": { + "type": "string", + "format": "byte", + "title": "hashes from the app output from the prev block" + }, + "next_validators_hash": { + "type": "string", + "format": "byte" + }, + "consensus_hash": { + "type": "string", + "format": "byte" + }, + "app_hash": { + "type": "string", + "format": "byte" + }, + "last_results_hash": { + "type": "string", + "format": "byte" + }, + "evidence_hash": { + "type": "string", + "format": "byte", + "title": "consensus info" + }, + "proposer_address": { + "type": "string", + "format": "byte" + } + }, + "description": "Header defines the structure of a block header." 
+ }, + "commit": { + "type": "object", + "properties": { + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "signatures": { + "type": "array", + "items": { + "type": "object", + "properties": { + "block_id_flag": { + "type": "string", + "enum": [ + "BLOCK_ID_FLAG_UNKNOWN", + "BLOCK_ID_FLAG_ABSENT", + "BLOCK_ID_FLAG_COMMIT", + "BLOCK_ID_FLAG_NIL" + ], + "default": "BLOCK_ID_FLAG_UNKNOWN", + "title": "BlockIdFlag indicates which BlcokID the signature is for" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "CommitSig is a part of the Vote included in a Commit." + } + } + }, + "description": "Commit contains the evidence that a block was committed by a set of validators." 
+ } + } + }, + "validator_set": { + "type": "object", + "properties": { + "validators": { + "type": "array", + "items": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + } + }, + "proposer": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + }, + "total_voting_power": { + "type": "string", + "format": "int64" + } + } + } + } + }, + "common_height": { + "type": "string", + "format": "int64" + }, + "byzantine_validators": { + "type": "array", + "items": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + } + }, + "total_voting_power": { + "type": "string", + "format": "int64" + }, + "timestamp": { + "type": "string", + "format": "date-time" + } + }, + 
"description": "LightClientAttackEvidence contains evidence of a set of validators attempting to mislead a light client." + } + } + } + } + } + }, + "last_commit": { + "type": "object", + "properties": { + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "signatures": { + "type": "array", + "items": { + "type": "object", + "properties": { + "block_id_flag": { + "type": "string", + "enum": [ + "BLOCK_ID_FLAG_UNKNOWN", + "BLOCK_ID_FLAG_ABSENT", + "BLOCK_ID_FLAG_COMMIT", + "BLOCK_ID_FLAG_NIL" + ], + "default": "BLOCK_ID_FLAG_UNKNOWN", + "title": "BlockIdFlag indicates which BlcokID the signature is for" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "CommitSig is a part of the Vote included in a Commit." + } + } + }, + "description": "Commit contains the evidence that a block was committed by a set of validators." + } + } + }, + "pagination": { + "description": "pagination defines a pagination for the response.", + "type": "object", + "properties": { + "next_key": { + "type": "string", + "format": "byte", + "description": "next_key is the key to be passed to PageRequest.key to\nquery the next page most efficiently. It will be empty if\nthere are no more results." 
+ }, + "total": { + "type": "string", + "format": "uint64", + "title": "total is total number of results available if PageRequest.count_total\nwas set, its value is undefined otherwise" + } + } + } + }, + "description": "GetBlockWithTxsResponse is the response type for the Service.GetBlockWithTxs method.\n\nSince: cosmos-sdk 0.45.2" + }, + "cosmos.tx.v1beta1.GetTxResponse": { + "type": "object", + "properties": { + "tx": { + "$ref": "#/definitions/cosmos.tx.v1beta1.Tx", + "description": "tx is the queried transaction." + }, + "tx_response": { + "type": "object", + "properties": { + "height": { + "type": "string", + "format": "int64", + "title": "The block height" + }, + "txhash": { + "type": "string", + "description": "The transaction hash." + }, + "codespace": { + "type": "string", + "title": "Namespace for the Code" + }, + "code": { + "type": "integer", + "format": "int64", + "description": "Response code." + }, + "data": { + "type": "string", + "description": "Result bytes, if any." + }, + "raw_log": { + "type": "string", + "description": "The output of the application's logger (raw string). May be\nnon-deterministic." + }, + "logs": { + "type": "array", + "items": { + "type": "object", + "properties": { + "msg_index": { + "type": "integer", + "format": "int64" + }, + "log": { + "type": "string" + }, + "events": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "attributes": { + "type": "array", + "items": { + "type": "object", + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "description": "Attribute defines an attribute wrapper where the key and value are\nstrings instead of raw bytes." + } + } + }, + "description": "StringEvent defines en Event object wrapper where all the attributes\ncontain key/value pairs that are strings instead of raw bytes." + }, + "description": "Events contains a slice of Event objects that were emitted during some\nexecution." 
+ } + }, + "description": "ABCIMessageLog defines a structure containing an indexed tx ABCI message log." + }, + "description": "The output of the application's logger (typed). May be non-deterministic." + }, + "info": { + "type": "string", + "description": "Additional information. May be non-deterministic." + }, + "gas_wanted": { + "type": "string", + "format": "int64", + "description": "Amount of gas requested for transaction." + }, + "gas_used": { + "type": "string", + "format": "int64", + "description": "Amount of gas consumed by transaction." + }, + "tx": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. 
(Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." + } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + }, + "timestamp": { + "type": "string", + "description": "Time of the previous block. For heights > 1, it's the weighted median of\nthe timestamps of the valid votes in the block.LastCommit. For height == 1,\nit's genesis time." + }, + "events": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "attributes": { + "type": "array", + "items": { + "type": "object", + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + }, + "index": { + "type": "boolean" + } + }, + "description": "EventAttribute is a single key-value pair, associated with an event." + } + } + }, + "description": "Event allows application developers to attach additional information to\nResponseBeginBlock, ResponseEndBlock, ResponseCheckTx and ResponseDeliverTx.\nLater, transactions may be queried using these events." + }, + "description": "Events defines all the events emitted by processing a transaction. Note,\nthese events include those emitted by processing all the messages and those\nemitted from the ante. Whereas Logs contains the events, with\nadditional metadata, emitted only by processing the messages.\n\nSince: cosmos-sdk 0.42.11, 0.44.5, 0.45" + } + }, + "description": "TxResponse defines a structure containing relevant tx data and metadata. The\ntags are stringified and the log is JSON decoded." 
+ } + }, + "description": "GetTxResponse is the response type for the Service.GetTx method." + }, + "cosmos.tx.v1beta1.GetTxsEventResponse": { + "type": "object", + "properties": { + "txs": { + "type": "array", + "items": { + "$ref": "#/definitions/cosmos.tx.v1beta1.Tx" + }, + "description": "txs is the list of queried transactions." + }, + "tx_responses": { + "type": "array", + "items": { + "type": "object", + "properties": { + "height": { + "type": "string", + "format": "int64", + "title": "The block height" + }, + "txhash": { + "type": "string", + "description": "The transaction hash." + }, + "codespace": { + "type": "string", + "title": "Namespace for the Code" + }, + "code": { + "type": "integer", + "format": "int64", + "description": "Response code." + }, + "data": { + "type": "string", + "description": "Result bytes, if any." + }, + "raw_log": { + "type": "string", + "description": "The output of the application's logger (raw string). May be\nnon-deterministic." + }, + "logs": { + "type": "array", + "items": { + "type": "object", + "properties": { + "msg_index": { + "type": "integer", + "format": "int64" + }, + "log": { + "type": "string" + }, + "events": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "attributes": { + "type": "array", + "items": { + "type": "object", + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "description": "Attribute defines an attribute wrapper where the key and value are\nstrings instead of raw bytes." + } + } + }, + "description": "StringEvent defines en Event object wrapper where all the attributes\ncontain key/value pairs that are strings instead of raw bytes." + }, + "description": "Events contains a slice of Event objects that were emitted during some\nexecution." + } + }, + "description": "ABCIMessageLog defines a structure containing an indexed tx ABCI message log." 
+ }, + "description": "The output of the application's logger (typed). May be non-deterministic." + }, + "info": { + "type": "string", + "description": "Additional information. May be non-deterministic." + }, + "gas_wanted": { + "type": "string", + "format": "int64", + "description": "Amount of gas requested for transaction." + }, + "gas_used": { + "type": "string", + "format": "int64", + "description": "Amount of gas consumed by transaction." + }, + "tx": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
+ }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." + } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + }, + "timestamp": { + "type": "string", + "description": "Time of the previous block. For heights > 1, it's the weighted median of\nthe timestamps of the valid votes in the block.LastCommit. For height == 1,\nit's genesis time." + }, + "events": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "attributes": { + "type": "array", + "items": { + "type": "object", + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + }, + "index": { + "type": "boolean" + } + }, + "description": "EventAttribute is a single key-value pair, associated with an event." + } + } + }, + "description": "Event allows application developers to attach additional information to\nResponseBeginBlock, ResponseEndBlock, ResponseCheckTx and ResponseDeliverTx.\nLater, transactions may be queried using these events." + }, + "description": "Events defines all the events emitted by processing a transaction. Note,\nthese events include those emitted by processing all the messages and those\nemitted from the ante. Whereas Logs contains the events, with\nadditional metadata, emitted only by processing the messages.\n\nSince: cosmos-sdk 0.42.11, 0.44.5, 0.45" + } + }, + "description": "TxResponse defines a structure containing relevant tx data and metadata. The\ntags are stringified and the log is JSON decoded." 
+ }, + "description": "tx_responses is the list of queried TxResponses." + }, + "pagination": { + "description": "pagination defines a pagination for the response.\nDeprecated post v0.46.x: use total instead.", + "type": "object", + "properties": { + "next_key": { + "type": "string", + "format": "byte", + "description": "next_key is the key to be passed to PageRequest.key to\nquery the next page most efficiently. It will be empty if\nthere are no more results." + }, + "total": { + "type": "string", + "format": "uint64", + "title": "total is total number of results available if PageRequest.count_total\nwas set, its value is undefined otherwise" + } + } + }, + "total": { + "type": "string", + "format": "uint64", + "title": "total is total number of results available" + } + }, + "description": "GetTxsEventResponse is the response type for the Service.TxsByEvents\nRPC method." + }, + "cosmos.tx.v1beta1.ModeInfo": { + "type": "object", + "properties": { + "single": { + "title": "single represents a single signer", + "type": "object", + "properties": { + "mode": { + "title": "mode is the signing mode of the single signer", + "type": "string", + "enum": [ + "SIGN_MODE_UNSPECIFIED", + "SIGN_MODE_DIRECT", + "SIGN_MODE_TEXTUAL", + "SIGN_MODE_DIRECT_AUX", + "SIGN_MODE_LEGACY_AMINO_JSON", + "SIGN_MODE_EIP_191" + ], + "default": "SIGN_MODE_UNSPECIFIED", + "description": "SignMode represents a signing mode with its own security guarantees.\n\nThis enum should be considered a registry of all known sign modes\nin the Cosmos ecosystem. Apps are not expected to support all known\nsign modes. 
Apps that would like to support custom sign modes are\nencouraged to open a small PR against this file to add a new case\nto this SignMode enum describing their sign mode so that different\napps have a consistent version of this enum.\n\n - SIGN_MODE_UNSPECIFIED: SIGN_MODE_UNSPECIFIED specifies an unknown signing mode and will be\nrejected.\n - SIGN_MODE_DIRECT: SIGN_MODE_DIRECT specifies a signing mode which uses SignDoc and is\nverified with raw bytes from Tx.\n - SIGN_MODE_TEXTUAL: SIGN_MODE_TEXTUAL is a future signing mode that will verify some\nhuman-readable textual representation on top of the binary representation\nfrom SIGN_MODE_DIRECT. It is currently not supported.\n - SIGN_MODE_DIRECT_AUX: SIGN_MODE_DIRECT_AUX specifies a signing mode which uses\nSignDocDirectAux. As opposed to SIGN_MODE_DIRECT, this sign mode does not\nrequire signers signing over other signers' `signer_info`. It also allows\nfor adding Tips in transactions.\n\nSince: cosmos-sdk 0.46\n - SIGN_MODE_LEGACY_AMINO_JSON: SIGN_MODE_LEGACY_AMINO_JSON is a backwards compatibility mode which uses\nAmino JSON and will be removed in the future.\n - SIGN_MODE_EIP_191: SIGN_MODE_EIP_191 specifies the sign mode for EIP 191 signing on the Cosmos\nSDK. Ref: https://eips.ethereum.org/EIPS/eip-191\n\nCurrently, SIGN_MODE_EIP_191 is registered as a SignMode enum variant,\nbut is not implemented on the SDK by default. To enable EIP-191, you need\nto pass a custom `TxConfig` that has an implementation of\n`SignModeHandler` for EIP-191. The SDK may decide to fully support\nEIP-191 in the future.\n\nSince: cosmos-sdk 0.45.2" + } + } + }, + "multi": { + "$ref": "#/definitions/cosmos.tx.v1beta1.ModeInfo.Multi", + "title": "multi represents a nested multisig signer" + } + }, + "description": "ModeInfo describes the signing mode of a single or nested multisig signer." 
+ }, + "cosmos.tx.v1beta1.ModeInfo.Multi": { + "type": "object", + "properties": { + "bitarray": { + "title": "bitarray specifies which keys within the multisig are signing", + "type": "object", + "properties": { + "extra_bits_stored": { + "type": "integer", + "format": "int64" + }, + "elems": { + "type": "string", + "format": "byte" + } + }, + "description": "CompactBitArray is an implementation of a space efficient bit array.\nThis is used to ensure that the encoded data takes up a minimal amount of\nspace after proto encoding.\nThis is not thread safe, and is not intended for concurrent usage." + }, + "mode_infos": { + "type": "array", + "items": { + "$ref": "#/definitions/cosmos.tx.v1beta1.ModeInfo" + }, + "title": "mode_infos is the corresponding modes of the signers of the multisig\nwhich could include nested multisig public keys" + } + }, + "title": "Multi is the mode info for a multisig public key" + }, + "cosmos.tx.v1beta1.ModeInfo.Single": { + "type": "object", + "properties": { + "mode": { + "title": "mode is the signing mode of the single signer", + "type": "string", + "enum": [ + "SIGN_MODE_UNSPECIFIED", + "SIGN_MODE_DIRECT", + "SIGN_MODE_TEXTUAL", + "SIGN_MODE_DIRECT_AUX", + "SIGN_MODE_LEGACY_AMINO_JSON", + "SIGN_MODE_EIP_191" + ], + "default": "SIGN_MODE_UNSPECIFIED", + "description": "SignMode represents a signing mode with its own security guarantees.\n\nThis enum should be considered a registry of all known sign modes\nin the Cosmos ecosystem. Apps are not expected to support all known\nsign modes. 
Apps that would like to support custom sign modes are\nencouraged to open a small PR against this file to add a new case\nto this SignMode enum describing their sign mode so that different\napps have a consistent version of this enum.\n\n - SIGN_MODE_UNSPECIFIED: SIGN_MODE_UNSPECIFIED specifies an unknown signing mode and will be\nrejected.\n - SIGN_MODE_DIRECT: SIGN_MODE_DIRECT specifies a signing mode which uses SignDoc and is\nverified with raw bytes from Tx.\n - SIGN_MODE_TEXTUAL: SIGN_MODE_TEXTUAL is a future signing mode that will verify some\nhuman-readable textual representation on top of the binary representation\nfrom SIGN_MODE_DIRECT. It is currently not supported.\n - SIGN_MODE_DIRECT_AUX: SIGN_MODE_DIRECT_AUX specifies a signing mode which uses\nSignDocDirectAux. As opposed to SIGN_MODE_DIRECT, this sign mode does not\nrequire signers signing over other signers' `signer_info`. It also allows\nfor adding Tips in transactions.\n\nSince: cosmos-sdk 0.46\n - SIGN_MODE_LEGACY_AMINO_JSON: SIGN_MODE_LEGACY_AMINO_JSON is a backwards compatibility mode which uses\nAmino JSON and will be removed in the future.\n - SIGN_MODE_EIP_191: SIGN_MODE_EIP_191 specifies the sign mode for EIP 191 signing on the Cosmos\nSDK. Ref: https://eips.ethereum.org/EIPS/eip-191\n\nCurrently, SIGN_MODE_EIP_191 is registered as a SignMode enum variant,\nbut is not implemented on the SDK by default. To enable EIP-191, you need\nto pass a custom `TxConfig` that has an implementation of\n`SignModeHandler` for EIP-191. The SDK may decide to fully support\nEIP-191 in the future.\n\nSince: cosmos-sdk 0.45.2" + } + }, + "title": "Single is the mode info for a single signer. 
It is structured as a message\nto allow for additional fields such as locale for SIGN_MODE_TEXTUAL in the\nfuture" + }, + "cosmos.tx.v1beta1.OrderBy": { + "type": "string", + "enum": [ + "ORDER_BY_UNSPECIFIED", + "ORDER_BY_ASC", + "ORDER_BY_DESC" + ], + "default": "ORDER_BY_UNSPECIFIED", + "description": "- ORDER_BY_UNSPECIFIED: ORDER_BY_UNSPECIFIED specifies an unknown sorting order. OrderBy defaults to ASC in this case.\n - ORDER_BY_ASC: ORDER_BY_ASC defines ascending order\n - ORDER_BY_DESC: ORDER_BY_DESC defines descending order", + "title": "OrderBy defines the sorting order" + }, + "cosmos.tx.v1beta1.SignerInfo": { + "type": "object", + "properties": { + "public_key": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. 
(Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." + } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + }, + "mode_info": { + "$ref": "#/definitions/cosmos.tx.v1beta1.ModeInfo", + "title": "mode_info describes the signing mode of the signer and is a nested\nstructure to support nested multisig pubkey's" + }, + "sequence": { + "type": "string", + "format": "uint64", + "description": "sequence is the sequence of the account, which describes the\nnumber of committed transactions signed by a given address. It is used to\nprevent replay attacks." + } + }, + "description": "SignerInfo describes the public key and signing mode of a single top-level\nsigner." + }, + "cosmos.tx.v1beta1.SimulateRequest": { + "type": "object", + "properties": { + "tx": { + "$ref": "#/definitions/cosmos.tx.v1beta1.Tx", + "description": "tx is the transaction to simulate.\nDeprecated. Send raw tx bytes instead." + }, + "tx_bytes": { + "type": "string", + "format": "byte", + "description": "tx_bytes is the raw transaction.\n\nSince: cosmos-sdk 0.43" + } + }, + "description": "SimulateRequest is the request type for the Service.Simulate\nRPC method." + }, + "cosmos.tx.v1beta1.SimulateResponse": { + "type": "object", + "properties": { + "gas_info": { + "description": "gas_info is the information about gas used in the simulation.", + "type": "object", + "properties": { + "gas_wanted": { + "type": "string", + "format": "uint64", + "description": "GasWanted is the maximum units of work we allow this tx to perform." 
+ }, + "gas_used": { + "type": "string", + "format": "uint64", + "description": "GasUsed is the amount of gas actually consumed." + } + } + }, + "result": { + "description": "result is the result of the simulation.", + "type": "object", + "properties": { + "data": { + "type": "string", + "format": "byte", + "description": "Data is any data returned from message or handler execution. It MUST be\nlength prefixed in order to separate data from multiple message executions.\nDeprecated. This field is still populated, but prefer msg_response instead\nbecause it also contains the Msg response typeURL." + }, + "log": { + "type": "string", + "description": "Log contains the log information from message or handler execution." + }, + "events": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "attributes": { + "type": "array", + "items": { + "type": "object", + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + }, + "index": { + "type": "boolean" + } + }, + "description": "EventAttribute is a single key-value pair, associated with an event." + } + } + }, + "description": "Event allows application developers to attach additional information to\nResponseBeginBlock, ResponseEndBlock, ResponseCheckTx and ResponseDeliverTx.\nLater, transactions may be queried using these events." + }, + "description": "Events contains a slice of Event objects that were emitted during message\nor handler execution." + }, + "msg_responses": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). 
The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." 
+ } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + }, + "description": "msg_responses contains the Msg handler responses type packed in Anys.\n\nSince: cosmos-sdk 0.46" + } + } + } + }, + "description": "SimulateResponse is the response type for the\nService.SimulateRPC method." + }, + "cosmos.tx.v1beta1.Tip": { + "type": "object", + "properties": { + "amount": { + "type": "array", + "items": { + "type": "object", + "properties": { + "denom": { + "type": "string" + }, + "amount": { + "type": "string" + } + }, + "description": "Coin defines a token with a denomination and an amount.\n\nNOTE: The amount field is an Int which implements the custom method\nsignatures required by gogoproto." + }, + "title": "amount is the amount of the tip" + }, + "tipper": { + "type": "string", + "title": "tipper is the address of the account paying for the tip" + } + }, + "description": "Tip is the tip used for meta-transactions.\n\nSince: cosmos-sdk 0.46" + }, + "cosmos.tx.v1beta1.Tx": { + "type": "object", + "properties": { + "body": { + "title": "body is the processable content of the transaction", + "type": "object", + "properties": { + "messages": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. 
However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." 
+ } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + }, + "description": "messages is a list of messages to be executed. The required signers of\nthose messages define the number and order of elements in AuthInfo's\nsigner_infos and Tx's signatures. Each required signer address is added to\nthe list only the first time it occurs.\nBy convention, the first required signer (usually from the first message)\nis referred to as the primary signer and pays the fee for the whole\ntransaction." + }, + "memo": { + "type": "string", + "description": "memo is any arbitrary note/comment to be added to the transaction.\nWARNING: in clients, any publicly exposed text should not be called memo,\nbut should be called `note` instead (see https://github.com/cosmos/cosmos-sdk/issues/9122)." + }, + "timeout_height": { + "type": "string", + "format": "uint64", + "title": "timeout is the block height after which this transaction will not\nbe processed by the chain" + }, + "extension_options": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. 
However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." 
+ } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + }, + "title": "extension_options are arbitrary options that can be added by chains\nwhen the default options are not sufficient. If any of these are present\nand can't be handled, the transaction will be rejected" + }, + "non_critical_extension_options": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
+ }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." + } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + }, + "title": "extension_options are arbitrary options that can be added by chains\nwhen the default options are not sufficient. If any of these are present\nand can't be handled, they will be ignored" + } + }, + "description": "TxBody is the body of a transaction that all signers sign over." + }, + "auth_info": { + "$ref": "#/definitions/cosmos.tx.v1beta1.AuthInfo", + "title": "auth_info is the authorization related content of the transaction,\nspecifically signers, signer modes and fee" + }, + "signatures": { + "type": "array", + "items": { + "type": "string", + "format": "byte" + }, + "description": "signatures is a list of signatures that matches the length and order of\nAuthInfo's signer_infos to allow connecting signature meta information like\npublic key and signing mode by position." + } + }, + "description": "Tx is the standard type used for broadcasting transactions." + }, + "cosmos.tx.v1beta1.TxBody": { + "type": "object", + "properties": { + "messages": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). 
The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." 
+ } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + }, + "description": "messages is a list of messages to be executed. The required signers of\nthose messages define the number and order of elements in AuthInfo's\nsigner_infos and Tx's signatures. Each required signer address is added to\nthe list only the first time it occurs.\nBy convention, the first required signer (usually from the first message)\nis referred to as the primary signer and pays the fee for the whole\ntransaction." + }, + "memo": { + "type": "string", + "description": "memo is any arbitrary note/comment to be added to the transaction.\nWARNING: in clients, any publicly exposed text should not be called memo,\nbut should be called `note` instead (see https://github.com/cosmos/cosmos-sdk/issues/9122)." + }, + "timeout_height": { + "type": "string", + "format": "uint64", + "title": "timeout is the block height after which this transaction will not\nbe processed by the chain" + }, + "extension_options": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. 
However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." 
+ } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + }, + "title": "extension_options are arbitrary options that can be added by chains\nwhen the default options are not sufficient. If any of these are present\nand can't be handled, the transaction will be rejected" + }, + "non_critical_extension_options": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
+ }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." + } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + }, + "title": "extension_options are arbitrary options that can be added by chains\nwhen the default options are not sufficient. If any of these are present\nand can't be handled, they will be ignored" + } + }, + "description": "TxBody is the body of a transaction that all signers sign over." + }, + "cosmos.tx.v1beta1.TxDecodeAminoRequest": { + "type": "object", + "properties": { + "amino_binary": { + "type": "string", + "format": "byte" + } + }, + "description": "TxDecodeAminoRequest is the request type for the Service.TxDecodeAmino\nRPC method.\n\nSince: cosmos-sdk 0.47" + }, + "cosmos.tx.v1beta1.TxDecodeAminoResponse": { + "type": "object", + "properties": { + "amino_json": { + "type": "string" + } + }, + "description": "TxDecodeAminoResponse is the response type for the Service.TxDecodeAmino\nRPC method.\n\nSince: cosmos-sdk 0.47" + }, + "cosmos.tx.v1beta1.TxDecodeRequest": { + "type": "object", + "properties": { + "tx_bytes": { + "type": "string", + "format": "byte", + "description": "tx_bytes is the raw transaction." + } + }, + "description": "TxDecodeRequest is the request type for the Service.TxDecode\nRPC method.\n\nSince: cosmos-sdk 0.47" + }, + "cosmos.tx.v1beta1.TxDecodeResponse": { + "type": "object", + "properties": { + "tx": { + "$ref": "#/definitions/cosmos.tx.v1beta1.Tx", + "description": "tx is the decoded transaction." 
+ } + }, + "description": "TxDecodeResponse is the response type for the\nService.TxDecode method.\n\nSince: cosmos-sdk 0.47" + }, + "cosmos.tx.v1beta1.TxEncodeAminoRequest": { + "type": "object", + "properties": { + "amino_json": { + "type": "string" + } + }, + "description": "TxEncodeAminoRequest is the request type for the Service.TxEncodeAmino\nRPC method.\n\nSince: cosmos-sdk 0.47" + }, + "cosmos.tx.v1beta1.TxEncodeAminoResponse": { + "type": "object", + "properties": { + "amino_binary": { + "type": "string", + "format": "byte" + } + }, + "description": "TxEncodeAminoResponse is the response type for the Service.TxEncodeAmino\nRPC method.\n\nSince: cosmos-sdk 0.47" + }, + "cosmos.tx.v1beta1.TxEncodeRequest": { + "type": "object", + "properties": { + "tx": { + "$ref": "#/definitions/cosmos.tx.v1beta1.Tx", + "description": "tx is the transaction to encode." + } + }, + "description": "TxEncodeRequest is the request type for the Service.TxEncode\nRPC method.\n\nSince: cosmos-sdk 0.47" + }, + "cosmos.tx.v1beta1.TxEncodeResponse": { + "type": "object", + "properties": { + "tx_bytes": { + "type": "string", + "format": "byte", + "description": "tx_bytes is the encoded transaction bytes." + } + }, + "description": "TxEncodeResponse is the response type for the\nService.TxEncode method.\n\nSince: cosmos-sdk 0.47" + }, + "google.protobuf.Any": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. 
However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." 
+ } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + }, + "grpc.gateway.runtime.Error": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + }, + "details": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
+ }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." + } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\nExample 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\nExample 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + } + } + } + }, + "tendermint.abci.Event": { + "type": "object", + "properties": { + "type": { "type": "string" }, - "events": { + "attributes": { "type": "array", "items": { "type": "object", "properties": { - "type": { + "key": { "type": "string" }, - "attributes": { - "type": "array", - "items": { + "value": { + "type": "string" + }, + "index": { + "type": "boolean" + } + }, + "description": "EventAttribute is a single key-value pair, associated with an event." + } + } + }, + "description": "Event allows application developers to attach additional information to\nResponseBeginBlock, ResponseEndBlock, ResponseCheckTx and ResponseDeliverTx.\nLater, transactions may be queried using these events." + }, + "tendermint.abci.EventAttribute": { + "type": "object", + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + }, + "index": { + "type": "boolean" + } + }, + "description": "EventAttribute is a single key-value pair, associated with an event." 
+ }, + "tendermint.crypto.PublicKey": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "tendermint.types.Block": { + "type": "object", + "properties": { + "header": { + "type": "object", + "properties": { + "version": { + "title": "basic block info", + "type": "object", + "properties": { + "block": { + "type": "string", + "format": "uint64" + }, + "app": { + "type": "string", + "format": "uint64" + } + }, + "description": "Consensus captures the consensus rules for processing a block in the blockchain,\nincluding all blockchain data structures and the rules of the application's\nstate transition machine." + }, + "chain_id": { + "type": "string" + }, + "height": { + "type": "string", + "format": "int64" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "last_block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { "type": "object", "properties": { - "key": { - "type": "string" + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "last_commit_hash": { + "type": "string", + "format": "byte", + "title": "hashes of block data" + }, + "data_hash": { + "type": "string", + "format": "byte" + }, + "validators_hash": { + "type": "string", + "format": "byte", + "title": "hashes from the app output from the prev block" + }, + "next_validators_hash": { + "type": "string", + "format": "byte" + }, + "consensus_hash": { + "type": "string", + "format": "byte" + }, + "app_hash": { + "type": "string", + "format": "byte" + }, + "last_results_hash": { + "type": "string", + "format": "byte" + }, + "evidence_hash": { + "type": "string", + "format": "byte", + "title": "consensus 
info" + }, + "proposer_address": { + "type": "string", + "format": "byte" + } + }, + "description": "Header defines the structure of a block header." + }, + "data": { + "type": "object", + "properties": { + "txs": { + "type": "array", + "items": { + "type": "string", + "format": "byte" + }, + "description": "Txs that will be applied by state @ block.Height+1.\nNOTE: not all txs here are valid. We're just agreeing on the order first.\nThis means that block.AppHash does not include these txs." + } + }, + "title": "Data contains the set of transactions included in the block" + }, + "evidence": { + "type": "object", + "properties": { + "evidence": { + "type": "array", + "items": { + "type": "object", + "properties": { + "duplicate_vote_evidence": { + "type": "object", + "properties": { + "vote_a": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "SIGNED_MSG_TYPE_UNKNOWN", + "SIGNED_MSG_TYPE_PREVOTE", + "SIGNED_MSG_TYPE_PRECOMMIT", + "SIGNED_MSG_TYPE_PROPOSAL" + ], + "default": "SIGNED_MSG_TYPE_UNKNOWN", + "description": "SignedMsgType is a type of signed message in the consensus.\n\n - SIGNED_MSG_TYPE_PREVOTE: Votes\n - SIGNED_MSG_TYPE_PROPOSAL: Proposals" + }, + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "validator_index": { + "type": "integer", + "format": "int32" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "Vote represents a prevote, 
precommit, or commit vote from validators for\nconsensus." + }, + "vote_b": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "SIGNED_MSG_TYPE_UNKNOWN", + "SIGNED_MSG_TYPE_PREVOTE", + "SIGNED_MSG_TYPE_PRECOMMIT", + "SIGNED_MSG_TYPE_PROPOSAL" + ], + "default": "SIGNED_MSG_TYPE_UNKNOWN", + "description": "SignedMsgType is a type of signed message in the consensus.\n\n - SIGNED_MSG_TYPE_PREVOTE: Votes\n - SIGNED_MSG_TYPE_PROPOSAL: Proposals" + }, + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "validator_index": { + "type": "integer", + "format": "int32" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "Vote represents a prevote, precommit, or commit vote from validators for\nconsensus." + }, + "total_voting_power": { + "type": "string", + "format": "int64" + }, + "validator_power": { + "type": "string", + "format": "int64" + }, + "timestamp": { + "type": "string", + "format": "date-time" + } + }, + "description": "DuplicateVoteEvidence contains evidence of a validator signed two conflicting votes." 
+ }, + "light_client_attack_evidence": { + "type": "object", + "properties": { + "conflicting_block": { + "type": "object", + "properties": { + "signed_header": { + "type": "object", + "properties": { + "header": { + "type": "object", + "properties": { + "version": { + "title": "basic block info", + "type": "object", + "properties": { + "block": { + "type": "string", + "format": "uint64" + }, + "app": { + "type": "string", + "format": "uint64" + } + }, + "description": "Consensus captures the consensus rules for processing a block in the blockchain,\nincluding all blockchain data structures and the rules of the application's\nstate transition machine." + }, + "chain_id": { + "type": "string" + }, + "height": { + "type": "string", + "format": "int64" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "last_block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "last_commit_hash": { + "type": "string", + "format": "byte", + "title": "hashes of block data" + }, + "data_hash": { + "type": "string", + "format": "byte" + }, + "validators_hash": { + "type": "string", + "format": "byte", + "title": "hashes from the app output from the prev block" + }, + "next_validators_hash": { + "type": "string", + "format": "byte" + }, + "consensus_hash": { + "type": "string", + "format": "byte" + }, + "app_hash": { + "type": "string", + "format": "byte" + }, + "last_results_hash": { + "type": "string", + "format": "byte" + }, + "evidence_hash": { + "type": "string", + "format": "byte", + "title": "consensus info" + }, + "proposer_address": { + "type": "string", + "format": "byte" + } + }, + "description": "Header defines the structure of a block header." 
+ }, + "commit": { + "type": "object", + "properties": { + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "signatures": { + "type": "array", + "items": { + "type": "object", + "properties": { + "block_id_flag": { + "type": "string", + "enum": [ + "BLOCK_ID_FLAG_UNKNOWN", + "BLOCK_ID_FLAG_ABSENT", + "BLOCK_ID_FLAG_COMMIT", + "BLOCK_ID_FLAG_NIL" + ], + "default": "BLOCK_ID_FLAG_UNKNOWN", + "title": "BlockIdFlag indicates which BlcokID the signature is for" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "CommitSig is a part of the Vote included in a Commit." + } + } + }, + "description": "Commit contains the evidence that a block was committed by a set of validators." 
+ } + } + }, + "validator_set": { + "type": "object", + "properties": { + "validators": { + "type": "array", + "items": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + } + }, + "proposer": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + }, + "total_voting_power": { + "type": "string", + "format": "int64" + } + } + } + } + }, + "common_height": { + "type": "string", + "format": "int64" + }, + "byzantine_validators": { + "type": "array", + "items": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + } + }, + "total_voting_power": { + "type": "string", + "format": "int64" + }, + "timestamp": { + "type": "string", + "format": "date-time" + } }, - "value": { 
- "type": "string" - } - }, - "description": "Attribute defines an attribute wrapper where the key and value are\nstrings instead of raw bytes." + "description": "LightClientAttackEvidence contains evidence of a set of validators attempting to mislead a light client." + } } } - }, - "description": "StringEvent defines en Event object wrapper where all the attributes\ncontain key/value pairs that are strings instead of raw bytes." - }, - "description": "Events contains a slice of Event objects that were emitted during some\nexecution." - } - }, - "description": "ABCIMessageLog defines a structure containing an indexed tx ABCI message log." - }, - "cosmos.base.abci.v1beta1.Attribute": { - "type": "object", - "properties": { - "key": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "description": "Attribute defines an attribute wrapper where the key and value are\nstrings instead of raw bytes." - }, - "cosmos.base.abci.v1beta1.GasInfo": { - "type": "object", - "properties": { - "gas_wanted": { - "type": "string", - "format": "uint64", - "description": "GasWanted is the maximum units of work we allow this tx to perform." - }, - "gas_used": { - "type": "string", - "format": "uint64", - "description": "GasUsed is the amount of gas actually consumed." - } - }, - "description": "GasInfo defines tx execution gas context." - }, - "cosmos.base.abci.v1beta1.Result": { - "type": "object", - "properties": { - "data": { - "type": "string", - "format": "byte", - "description": "Data is any data returned from message or handler execution. It MUST be\nlength prefixed in order to separate data from multiple message executions." - }, - "log": { - "type": "string", - "description": "Log contains the log information from message or handler execution." 
+ } + } }, - "events": { - "type": "array", - "items": { - "type": "object", - "properties": { - "type": { - "type": "string" - }, - "attributes": { - "type": "array", - "items": { + "last_commit": { + "type": "object", + "properties": { + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { "type": "object", "properties": { - "key": { - "type": "string", - "format": "byte" + "total": { + "type": "integer", + "format": "int64" }, - "value": { + "hash": { "type": "string", "format": "byte" - }, - "index": { - "type": "boolean" } }, - "description": "EventAttribute is a single key-value pair, associated with an event." + "title": "PartsetHeader" } - } + }, + "title": "BlockID" }, - "description": "Event allows application developers to attach additional information to\nResponseBeginBlock, ResponseEndBlock, ResponseCheckTx and ResponseDeliverTx.\nLater, transactions may be queried using these events." + "signatures": { + "type": "array", + "items": { + "type": "object", + "properties": { + "block_id_flag": { + "type": "string", + "enum": [ + "BLOCK_ID_FLAG_UNKNOWN", + "BLOCK_ID_FLAG_ABSENT", + "BLOCK_ID_FLAG_COMMIT", + "BLOCK_ID_FLAG_NIL" + ], + "default": "BLOCK_ID_FLAG_UNKNOWN", + "title": "BlockIdFlag indicates which BlcokID the signature is for" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "CommitSig is a part of the Vote included in a Commit." + } + } }, - "description": "Events contains a slice of Event objects that were emitted during message\nor handler execution." + "description": "Commit contains the evidence that a block was committed by a set of validators." 
} - }, - "description": "Result is the union of ResponseFormat and ResponseCheckTx." + } }, - "cosmos.base.abci.v1beta1.StringEvent": { + "tendermint.types.BlockID": { "type": "object", "properties": { - "type": { - "type": "string" + "hash": { + "type": "string", + "format": "byte" }, - "attributes": { - "type": "array", - "items": { - "type": "object", - "properties": { - "key": { - "type": "string" - }, - "value": { - "type": "string" - } + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" }, - "description": "Attribute defines an attribute wrapper where the key and value are\nstrings instead of raw bytes." - } + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" } }, - "description": "StringEvent defines en Event object wrapper where all the attributes\ncontain key/value pairs that are strings instead of raw bytes." + "title": "BlockID" }, - "cosmos.base.abci.v1beta1.TxResponse": { + "tendermint.types.BlockIDFlag": { + "type": "string", + "enum": [ + "BLOCK_ID_FLAG_UNKNOWN", + "BLOCK_ID_FLAG_ABSENT", + "BLOCK_ID_FLAG_COMMIT", + "BLOCK_ID_FLAG_NIL" + ], + "default": "BLOCK_ID_FLAG_UNKNOWN", + "title": "BlockIdFlag indicates which BlcokID the signature is for" + }, + "tendermint.types.Commit": { "type": "object", "properties": { "height": { "type": "string", - "format": "int64", - "title": "The block height" - }, - "txhash": { - "type": "string", - "description": "The transaction hash." - }, - "codespace": { - "type": "string", - "title": "Namespace for the Code" + "format": "int64" }, - "code": { + "round": { "type": "integer", - "format": "int64", - "description": "Response code." - }, - "data": { - "type": "string", - "description": "Result bytes, if any." + "format": "int32" }, - "raw_log": { - "type": "string", - "description": "The output of the application's logger (raw string). May be\nnon-deterministic." 
+ "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" }, - "logs": { + "signatures": { "type": "array", "items": { "type": "object", "properties": { - "msg_index": { - "type": "integer", - "format": "int64" + "block_id_flag": { + "type": "string", + "enum": [ + "BLOCK_ID_FLAG_UNKNOWN", + "BLOCK_ID_FLAG_ABSENT", + "BLOCK_ID_FLAG_COMMIT", + "BLOCK_ID_FLAG_NIL" + ], + "default": "BLOCK_ID_FLAG_UNKNOWN", + "title": "BlockIdFlag indicates which BlcokID the signature is for" }, - "log": { - "type": "string" + "validator_address": { + "type": "string", + "format": "byte" }, - "events": { - "type": "array", - "items": { - "type": "object", - "properties": { - "type": { - "type": "string" - }, - "attributes": { - "type": "array", - "items": { - "type": "object", - "properties": { - "key": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "description": "Attribute defines an attribute wrapper where the key and value are\nstrings instead of raw bytes." - } - } - }, - "description": "StringEvent defines en Event object wrapper where all the attributes\ncontain key/value pairs that are strings instead of raw bytes." - }, - "description": "Events contains a slice of Event objects that were emitted during some\nexecution." - } - }, - "description": "ABCIMessageLog defines a structure containing an indexed tx ABCI message log." - }, - "description": "The output of the application's logger (typed). May be non-deterministic." - }, - "info": { - "type": "string", - "description": "Additional information. May be non-deterministic." - }, - "gas_wanted": { - "type": "string", - "format": "int64", - "description": "Amount of gas requested for transaction." 
- }, - "gas_used": { - "type": "string", - "format": "int64", - "description": "Amount of gas consumed by transaction." - }, - "tx": { - "type": "object", - "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + "timestamp": { + "type": "string", + "format": "date-time" + }, + "signature": { + "type": "string", + "format": "byte" + } }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." 
- } - }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - }, - "timestamp": { - "type": "string", - "description": "Time of the previous block. For heights > 1, it's the weighted median of\nthe timestamps of the valid votes in the block.LastCommit. For height == 1,\nit's genesis time." + "description": "CommitSig is a part of the Vote included in a Commit." + } } }, - "description": "TxResponse defines a structure containing relevant tx data and metadata. The\ntags are stringified and the log is JSON decoded." + "description": "Commit contains the evidence that a block was committed by a set of validators." }, - "cosmos.base.query.v1beta1.PageRequest": { + "tendermint.types.CommitSig": { "type": "object", "properties": { - "key": { - "type": "string", - "format": "byte", - "description": "key is a value returned in PageResponse.next_key to begin\nquerying the next page most efficiently. Only one of offset or key\nshould be set." - }, - "offset": { + "block_id_flag": { "type": "string", - "format": "uint64", - "description": "offset is a numeric offset that can be used when key is unavailable.\nIt is less efficient than using key. Only one of offset or key should\nbe set." + "enum": [ + "BLOCK_ID_FLAG_UNKNOWN", + "BLOCK_ID_FLAG_ABSENT", + "BLOCK_ID_FLAG_COMMIT", + "BLOCK_ID_FLAG_NIL" + ], + "default": "BLOCK_ID_FLAG_UNKNOWN", + "title": "BlockIdFlag indicates which BlcokID the signature is for" }, - "limit": { + "validator_address": { "type": "string", - "format": "uint64", - "description": "limit is the total number of results to be returned in the result page.\nIf left empty it will default to a value to be set by each app." 
- }, - "count_total": { - "type": "boolean", - "description": "count_total is set to true to indicate that the result set should include\na count of the total number of items available for pagination in UIs.\ncount_total is only respected when offset is used. It is ignored when key\nis set." + "format": "byte" }, - "reverse": { - "type": "boolean", - "description": "reverse is set to true if results are to be returned in the descending order.\n\nSince: cosmos-sdk 0.43" - } - }, - "description": "message SomeRequest {\n Foo some_parameter = 1;\n PageRequest pagination = 2;\n }", - "title": "PageRequest is to be embedded in gRPC request messages for efficient\npagination. Ex:" - }, - "cosmos.base.query.v1beta1.PageResponse": { - "type": "object", - "properties": { - "next_key": { + "timestamp": { "type": "string", - "format": "byte", - "title": "next_key is the key to be passed to PageRequest.key to\nquery the next page most efficiently" + "format": "date-time" }, - "total": { + "signature": { "type": "string", - "format": "uint64", - "title": "total is total number of results available if PageRequest.count_total\nwas set, its value is undefined otherwise" + "format": "byte" } }, - "description": "PageResponse is to be embedded in gRPC response messages where the\ncorresponding request message has used PageRequest.\n\n message SomeResponse {\n repeated Bar results = 1;\n PageResponse page = 2;\n }" + "description": "CommitSig is a part of the Vote included in a Commit." }, - "cosmos.base.v1beta1.Coin": { + "tendermint.types.Data": { "type": "object", "properties": { - "denom": { - "type": "string" - }, - "amount": { - "type": "string" + "txs": { + "type": "array", + "items": { + "type": "string", + "format": "byte" + }, + "description": "Txs that will be applied by state @ block.Height+1.\nNOTE: not all txs here are valid. We're just agreeing on the order first.\nThis means that block.AppHash does not include these txs." 
} }, - "description": "Coin defines a token with a denomination and an amount.\n\nNOTE: The amount field is an Int which implements the custom method\nsignatures required by gogoproto." + "title": "Data contains the set of transactions included in the block" }, - "cosmos.crypto.multisig.v1beta1.CompactBitArray": { + "tendermint.types.DuplicateVoteEvidence": { "type": "object", "properties": { - "extra_bits_stored": { - "type": "integer", + "vote_a": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "SIGNED_MSG_TYPE_UNKNOWN", + "SIGNED_MSG_TYPE_PREVOTE", + "SIGNED_MSG_TYPE_PRECOMMIT", + "SIGNED_MSG_TYPE_PROPOSAL" + ], + "default": "SIGNED_MSG_TYPE_UNKNOWN", + "description": "SignedMsgType is a type of signed message in the consensus.\n\n - SIGNED_MSG_TYPE_PREVOTE: Votes\n - SIGNED_MSG_TYPE_PROPOSAL: Proposals" + }, + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "validator_index": { + "type": "integer", + "format": "int32" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "Vote represents a prevote, precommit, or commit vote from validators for\nconsensus." 
+ }, + "vote_b": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "SIGNED_MSG_TYPE_UNKNOWN", + "SIGNED_MSG_TYPE_PREVOTE", + "SIGNED_MSG_TYPE_PRECOMMIT", + "SIGNED_MSG_TYPE_PROPOSAL" + ], + "default": "SIGNED_MSG_TYPE_UNKNOWN", + "description": "SignedMsgType is a type of signed message in the consensus.\n\n - SIGNED_MSG_TYPE_PREVOTE: Votes\n - SIGNED_MSG_TYPE_PROPOSAL: Proposals" + }, + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "validator_index": { + "type": "integer", + "format": "int32" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "Vote represents a prevote, precommit, or commit vote from validators for\nconsensus." + }, + "total_voting_power": { + "type": "string", "format": "int64" }, - "elems": { + "validator_power": { "type": "string", - "format": "byte" + "format": "int64" + }, + "timestamp": { + "type": "string", + "format": "date-time" } }, - "description": "CompactBitArray is an implementation of a space efficient bit array.\nThis is used to ensure that the encoded data takes up a minimal amount of\nspace after proto encoding.\nThis is not thread safe, and is not intended for concurrent usage." 
- }, - "cosmos.tx.signing.v1beta1.SignMode": { - "type": "string", - "enum": [ - "SIGN_MODE_UNSPECIFIED", - "SIGN_MODE_DIRECT", - "SIGN_MODE_TEXTUAL", - "SIGN_MODE_LEGACY_AMINO_JSON" - ], - "default": "SIGN_MODE_UNSPECIFIED", - "description": "SignMode represents a signing mode with its own security guarantees.\n\n - SIGN_MODE_UNSPECIFIED: SIGN_MODE_UNSPECIFIED specifies an unknown signing mode and will be\nrejected\n - SIGN_MODE_DIRECT: SIGN_MODE_DIRECT specifies a signing mode which uses SignDoc and is\nverified with raw bytes from Tx\n - SIGN_MODE_TEXTUAL: SIGN_MODE_TEXTUAL is a future signing mode that will verify some\nhuman-readable textual representation on top of the binary representation\nfrom SIGN_MODE_DIRECT\n - SIGN_MODE_LEGACY_AMINO_JSON: SIGN_MODE_LEGACY_AMINO_JSON is a backwards compatibility mode which uses\nAmino JSON and will be removed in the future" + "description": "DuplicateVoteEvidence contains evidence of a validator signed two conflicting votes." }, - "cosmos.tx.v1beta1.AuthInfo": { + "tendermint.types.Evidence": { "type": "object", "properties": { - "signer_infos": { - "type": "array", - "items": { - "$ref": "#/definitions/cosmos.tx.v1beta1.SignerInfo" - }, - "description": "signer_infos defines the signing modes for the required signers. The number\nand order of elements must match the required signers from TxBody's\nmessages. The first element is the primary signer and the one which pays\nthe fee." - }, - "fee": { - "description": "Fee is the fee and gas limit for the transaction. The first signer is the\nprimary signer and the one which pays the fee. The fee can be calculated\nbased on the cost of evaluating the body and doing signature verification\nof the signers. 
This can be estimated via simulation.", + "duplicate_vote_evidence": { "type": "object", "properties": { - "amount": { - "type": "array", - "items": { - "type": "object", - "properties": { - "denom": { - "type": "string" + "vote_a": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "SIGNED_MSG_TYPE_UNKNOWN", + "SIGNED_MSG_TYPE_PREVOTE", + "SIGNED_MSG_TYPE_PRECOMMIT", + "SIGNED_MSG_TYPE_PROPOSAL" + ], + "default": "SIGNED_MSG_TYPE_UNKNOWN", + "description": "SignedMsgType is a type of signed message in the consensus.\n\n - SIGNED_MSG_TYPE_PREVOTE: Votes\n - SIGNED_MSG_TYPE_PROPOSAL: Proposals" + }, + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } }, - "amount": { - "type": "string" - } + "title": "BlockID" }, - "description": "Coin defines a token with a denomination and an amount.\n\nNOTE: The amount field is an Int which implements the custom method\nsignatures required by gogoproto." + "timestamp": { + "type": "string", + "format": "date-time" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "validator_index": { + "type": "integer", + "format": "int32" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "Vote represents a prevote, precommit, or commit vote from validators for\nconsensus." 
+ }, + "vote_b": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "SIGNED_MSG_TYPE_UNKNOWN", + "SIGNED_MSG_TYPE_PREVOTE", + "SIGNED_MSG_TYPE_PRECOMMIT", + "SIGNED_MSG_TYPE_PROPOSAL" + ], + "default": "SIGNED_MSG_TYPE_UNKNOWN", + "description": "SignedMsgType is a type of signed message in the consensus.\n\n - SIGNED_MSG_TYPE_PREVOTE: Votes\n - SIGNED_MSG_TYPE_PROPOSAL: Proposals" + }, + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "validator_index": { + "type": "integer", + "format": "int32" + }, + "signature": { + "type": "string", + "format": "byte" + } }, - "title": "amount is the amount of coins to be paid as a fee" + "description": "Vote represents a prevote, precommit, or commit vote from validators for\nconsensus." }, - "gas_limit": { + "total_voting_power": { "type": "string", - "format": "uint64", - "title": "gas_limit is the maximum gas that can be used in transaction processing\nbefore an out of gas error occurs" + "format": "int64" }, - "payer": { + "validator_power": { "type": "string", - "description": "if unset, the first signer is responsible for paying the fees. If set, the specified account must pay the fees.\nthe payer must be a tx signer (and thus have signed this field in AuthInfo).\nsetting this field does *not* change the ordering of required signers for the transaction." 
+ "format": "int64" }, - "granter": { + "timestamp": { "type": "string", - "title": "if set, the fee payer (either the first signer or the value of the payer field) requests that a fee grant be used\nto pay fees instead of the fee payer's own balance. If an appropriate fee grant does not exist or the chain does\nnot support fee grants, this will fail" + "format": "date-time" } - } - } - }, - "description": "AuthInfo describes the fee and signer modes that are used to sign a\ntransaction." - }, - "cosmos.tx.v1beta1.BroadcastMode": { - "type": "string", - "enum": [ - "BROADCAST_MODE_UNSPECIFIED", - "BROADCAST_MODE_BLOCK", - "BROADCAST_MODE_SYNC", - "BROADCAST_MODE_ASYNC" - ], - "default": "BROADCAST_MODE_UNSPECIFIED", - "description": "BroadcastMode specifies the broadcast mode for the TxService.Broadcast RPC method.\n\n - BROADCAST_MODE_UNSPECIFIED: zero-value for mode ordering\n - BROADCAST_MODE_BLOCK: BROADCAST_MODE_BLOCK defines a tx broadcasting mode where the client waits for\nthe tx to be committed in a block.\n - BROADCAST_MODE_SYNC: BROADCAST_MODE_SYNC defines a tx broadcasting mode where the client waits for\na CheckTx execution response only.\n - BROADCAST_MODE_ASYNC: BROADCAST_MODE_ASYNC defines a tx broadcasting mode where the client returns\nimmediately." - }, - "cosmos.tx.v1beta1.BroadcastTxRequest": { - "type": "object", - "properties": { - "tx_bytes": { - "type": "string", - "format": "byte", - "description": "tx_bytes is the raw transaction." + }, + "description": "DuplicateVoteEvidence contains evidence of a validator signed two conflicting votes." 
}, - "mode": { - "type": "string", - "enum": [ - "BROADCAST_MODE_UNSPECIFIED", - "BROADCAST_MODE_BLOCK", - "BROADCAST_MODE_SYNC", - "BROADCAST_MODE_ASYNC" - ], - "default": "BROADCAST_MODE_UNSPECIFIED", - "description": "BroadcastMode specifies the broadcast mode for the TxService.Broadcast RPC method.\n\n - BROADCAST_MODE_UNSPECIFIED: zero-value for mode ordering\n - BROADCAST_MODE_BLOCK: BROADCAST_MODE_BLOCK defines a tx broadcasting mode where the client waits for\nthe tx to be committed in a block.\n - BROADCAST_MODE_SYNC: BROADCAST_MODE_SYNC defines a tx broadcasting mode where the client waits for\na CheckTx execution response only.\n - BROADCAST_MODE_ASYNC: BROADCAST_MODE_ASYNC defines a tx broadcasting mode where the client returns\nimmediately." - } - }, - "description": "BroadcastTxRequest is the request type for the Service.BroadcastTxRequest\nRPC method." - }, - "cosmos.tx.v1beta1.BroadcastTxResponse": { - "type": "object", - "properties": { - "tx_response": { + "light_client_attack_evidence": { "type": "object", "properties": { - "height": { - "type": "string", - "format": "int64", - "title": "The block height" - }, - "txhash": { - "type": "string", - "description": "The transaction hash." + "conflicting_block": { + "type": "object", + "properties": { + "signed_header": { + "type": "object", + "properties": { + "header": { + "type": "object", + "properties": { + "version": { + "title": "basic block info", + "type": "object", + "properties": { + "block": { + "type": "string", + "format": "uint64" + }, + "app": { + "type": "string", + "format": "uint64" + } + }, + "description": "Consensus captures the consensus rules for processing a block in the blockchain,\nincluding all blockchain data structures and the rules of the application's\nstate transition machine." 
+ }, + "chain_id": { + "type": "string" + }, + "height": { + "type": "string", + "format": "int64" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "last_block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "last_commit_hash": { + "type": "string", + "format": "byte", + "title": "hashes of block data" + }, + "data_hash": { + "type": "string", + "format": "byte" + }, + "validators_hash": { + "type": "string", + "format": "byte", + "title": "hashes from the app output from the prev block" + }, + "next_validators_hash": { + "type": "string", + "format": "byte" + }, + "consensus_hash": { + "type": "string", + "format": "byte" + }, + "app_hash": { + "type": "string", + "format": "byte" + }, + "last_results_hash": { + "type": "string", + "format": "byte" + }, + "evidence_hash": { + "type": "string", + "format": "byte", + "title": "consensus info" + }, + "proposer_address": { + "type": "string", + "format": "byte" + } + }, + "description": "Header defines the structure of a block header." 
+ }, + "commit": { + "type": "object", + "properties": { + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "signatures": { + "type": "array", + "items": { + "type": "object", + "properties": { + "block_id_flag": { + "type": "string", + "enum": [ + "BLOCK_ID_FLAG_UNKNOWN", + "BLOCK_ID_FLAG_ABSENT", + "BLOCK_ID_FLAG_COMMIT", + "BLOCK_ID_FLAG_NIL" + ], + "default": "BLOCK_ID_FLAG_UNKNOWN", + "title": "BlockIdFlag indicates which BlcokID the signature is for" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "CommitSig is a part of the Vote included in a Commit." + } + } + }, + "description": "Commit contains the evidence that a block was committed by a set of validators." 
+ } + } + }, + "validator_set": { + "type": "object", + "properties": { + "validators": { + "type": "array", + "items": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + } + }, + "proposer": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + }, + "total_voting_power": { + "type": "string", + "format": "int64" + } + } + } + } }, - "codespace": { + "common_height": { "type": "string", - "title": "Namespace for the Code" + "format": "int64" }, - "code": { - "type": "integer", - "format": "int64", - "description": "Response code." 
+ "byzantine_validators": { + "type": "array", + "items": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + } }, - "data": { + "total_voting_power": { "type": "string", - "description": "Result bytes, if any." + "format": "int64" }, - "raw_log": { + "timestamp": { "type": "string", - "description": "The output of the application's logger (raw string). May be\nnon-deterministic." - }, - "logs": { - "type": "array", - "items": { + "format": "date-time" + } + }, + "description": "LightClientAttackEvidence contains evidence of a set of validators attempting to mislead a light client." 
+ } + } + }, + "tendermint.types.EvidenceList": { + "type": "object", + "properties": { + "evidence": { + "type": "array", + "items": { + "type": "object", + "properties": { + "duplicate_vote_evidence": { "type": "object", "properties": { - "msg_index": { - "type": "integer", + "vote_a": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "SIGNED_MSG_TYPE_UNKNOWN", + "SIGNED_MSG_TYPE_PREVOTE", + "SIGNED_MSG_TYPE_PRECOMMIT", + "SIGNED_MSG_TYPE_PROPOSAL" + ], + "default": "SIGNED_MSG_TYPE_UNKNOWN", + "description": "SignedMsgType is a type of signed message in the consensus.\n\n - SIGNED_MSG_TYPE_PREVOTE: Votes\n - SIGNED_MSG_TYPE_PROPOSAL: Proposals" + }, + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "validator_index": { + "type": "integer", + "format": "int32" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "Vote represents a prevote, precommit, or commit vote from validators for\nconsensus." 
+ }, + "vote_b": { + "type": "object", + "properties": { + "type": { + "type": "string", + "enum": [ + "SIGNED_MSG_TYPE_UNKNOWN", + "SIGNED_MSG_TYPE_PREVOTE", + "SIGNED_MSG_TYPE_PRECOMMIT", + "SIGNED_MSG_TYPE_PROPOSAL" + ], + "default": "SIGNED_MSG_TYPE_UNKNOWN", + "description": "SignedMsgType is a type of signed message in the consensus.\n\n - SIGNED_MSG_TYPE_PREVOTE: Votes\n - SIGNED_MSG_TYPE_PROPOSAL: Proposals" + }, + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "validator_index": { + "type": "integer", + "format": "int32" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "Vote represents a prevote, precommit, or commit vote from validators for\nconsensus." + }, + "total_voting_power": { + "type": "string", "format": "int64" }, - "log": { - "type": "string" + "validator_power": { + "type": "string", + "format": "int64" }, - "events": { - "type": "array", - "items": { - "type": "object", - "properties": { - "type": { - "type": "string" - }, - "attributes": { - "type": "array", - "items": { + "timestamp": { + "type": "string", + "format": "date-time" + } + }, + "description": "DuplicateVoteEvidence contains evidence of a validator signed two conflicting votes." 
+ }, + "light_client_attack_evidence": { + "type": "object", + "properties": { + "conflicting_block": { + "type": "object", + "properties": { + "signed_header": { + "type": "object", + "properties": { + "header": { "type": "object", "properties": { - "key": { - "type": "string" + "version": { + "title": "basic block info", + "type": "object", + "properties": { + "block": { + "type": "string", + "format": "uint64" + }, + "app": { + "type": "string", + "format": "uint64" + } + }, + "description": "Consensus captures the consensus rules for processing a block in the blockchain,\nincluding all blockchain data structures and the rules of the application's\nstate transition machine." }, - "value": { + "chain_id": { "type": "string" + }, + "height": { + "type": "string", + "format": "int64" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "last_block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "last_commit_hash": { + "type": "string", + "format": "byte", + "title": "hashes of block data" + }, + "data_hash": { + "type": "string", + "format": "byte" + }, + "validators_hash": { + "type": "string", + "format": "byte", + "title": "hashes from the app output from the prev block" + }, + "next_validators_hash": { + "type": "string", + "format": "byte" + }, + "consensus_hash": { + "type": "string", + "format": "byte" + }, + "app_hash": { + "type": "string", + "format": "byte" + }, + "last_results_hash": { + "type": "string", + "format": "byte" + }, + "evidence_hash": { + "type": "string", + "format": "byte", + "title": "consensus info" + }, + "proposer_address": { + "type": "string", + "format": "byte" } }, - "description": "Attribute defines an attribute wrapper 
where the key and value are\nstrings instead of raw bytes." + "description": "Header defines the structure of a block header." + }, + "commit": { + "type": "object", + "properties": { + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "signatures": { + "type": "array", + "items": { + "type": "object", + "properties": { + "block_id_flag": { + "type": "string", + "enum": [ + "BLOCK_ID_FLAG_UNKNOWN", + "BLOCK_ID_FLAG_ABSENT", + "BLOCK_ID_FLAG_COMMIT", + "BLOCK_ID_FLAG_NIL" + ], + "default": "BLOCK_ID_FLAG_UNKNOWN", + "title": "BlockIdFlag indicates which BlcokID the signature is for" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "CommitSig is a part of the Vote included in a Commit." + } + } + }, + "description": "Commit contains the evidence that a block was committed by a set of validators." } } }, - "description": "StringEvent defines en Event object wrapper where all the attributes\ncontain key/value pairs that are strings instead of raw bytes." - }, - "description": "Events contains a slice of Event objects that were emitted during some\nexecution." 
+ "validator_set": { + "type": "object", + "properties": { + "validators": { + "type": "array", + "items": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + } + }, + "proposer": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + }, + "total_voting_power": { + "type": "string", + "format": "int64" + } + } + } + } + }, + "common_height": { + "type": "string", + "format": "int64" + }, + "byzantine_validators": { + "type": "array", + "items": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + } + }, + "total_voting_power": { + "type": "string", + "format": "int64" + }, + "timestamp": { + "type": "string", + "format": "date-time" } }, - "description": 
"ABCIMessageLog defines a structure containing an indexed tx ABCI message log." - }, - "description": "The output of the application's logger (typed). May be non-deterministic." - }, - "info": { + "description": "LightClientAttackEvidence contains evidence of a set of validators attempting to mislead a light client." + } + } + } + } + } + }, + "tendermint.types.Header": { + "type": "object", + "properties": { + "version": { + "title": "basic block info", + "type": "object", + "properties": { + "block": { "type": "string", - "description": "Additional information. May be non-deterministic." + "format": "uint64" }, - "gas_wanted": { + "app": { "type": "string", - "format": "int64", - "description": "Amount of gas requested for transaction." - }, - "gas_used": { + "format": "uint64" + } + }, + "description": "Consensus captures the consensus rules for processing a block in the blockchain,\nincluding all blockchain data structures and the rules of the application's\nstate transition machine." + }, + "chain_id": { + "type": "string" + }, + "height": { + "type": "string", + "format": "int64" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "last_block_id": { + "type": "object", + "properties": { + "hash": { "type": "string", - "format": "int64", - "description": "Amount of gas consumed by transaction." + "format": "byte" }, - "tx": { + "part_set_header": { "type": "object", "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. 
However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + "total": { + "type": "integer", + "format": "int64" }, - "value": { + "hash": { "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." 
+ "format": "byte" } }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - }, - "timestamp": { - "type": "string", - "description": "Time of the previous block. For heights > 1, it's the weighted median of\nthe timestamps of the valid votes in the block.LastCommit. For height == 1,\nit's genesis time." + "title": "PartsetHeader" } }, - "description": "TxResponse defines a structure containing relevant tx data and metadata. The\ntags are stringified and the log is JSON decoded." - } - }, - "description": "BroadcastTxResponse is the response type for the\nService.BroadcastTx method." - }, - "cosmos.tx.v1beta1.Fee": { - "type": "object", - "properties": { - "amount": { - "type": "array", - "items": { - "type": "object", - "properties": { - "denom": { - "type": "string" - }, - "amount": { - "type": "string" - } - }, - "description": "Coin defines a token with a denomination and an amount.\n\nNOTE: The amount field is an Int which implements the custom method\nsignatures required by gogoproto." - }, - "title": "amount is the amount of coins to be paid as a fee" + "title": "BlockID" + }, + "last_commit_hash": { + "type": "string", + "format": "byte", + "title": "hashes of block data" + }, + "data_hash": { + "type": "string", + "format": "byte" + }, + "validators_hash": { + "type": "string", + "format": "byte", + "title": "hashes from the app output from the prev block" + }, + "next_validators_hash": { + "type": "string", + "format": "byte" + }, + "consensus_hash": { + "type": "string", + "format": "byte" }, - "gas_limit": { + "app_hash": { "type": "string", - "format": "uint64", - "title": "gas_limit is the maximum gas that can be used in transaction processing\nbefore an out of gas error occurs" + "format": "byte" }, - "payer": { + "last_results_hash": { "type": "string", - "description": "if unset, the first signer is responsible for paying the fees. 
If set, the specified account must pay the fees.\nthe payer must be a tx signer (and thus have signed this field in AuthInfo).\nsetting this field does *not* change the ordering of required signers for the transaction." + "format": "byte" }, - "granter": { + "evidence_hash": { "type": "string", - "title": "if set, the fee payer (either the first signer or the value of the payer field) requests that a fee grant be used\nto pay fees instead of the fee payer's own balance. If an appropriate fee grant does not exist or the chain does\nnot support fee grants, this will fail" + "format": "byte", + "title": "consensus info" + }, + "proposer_address": { + "type": "string", + "format": "byte" } }, - "description": "Fee includes the amount of coins paid in fees and the maximum\ngas to be used by the transaction. The ratio yields an effective \"gasprice\",\nwhich must be above some miminum to be accepted into the mempool." + "description": "Header defines the structure of a block header." }, - "cosmos.tx.v1beta1.GetTxResponse": { + "tendermint.types.LightBlock": { "type": "object", "properties": { - "tx": { - "$ref": "#/definitions/cosmos.tx.v1beta1.Tx", - "description": "tx is the queried transaction." - }, - "tx_response": { + "signed_header": { "type": "object", "properties": { - "height": { - "type": "string", - "format": "int64", - "title": "The block height" - }, - "txhash": { - "type": "string", - "description": "The transaction hash." - }, - "codespace": { - "type": "string", - "title": "Namespace for the Code" - }, - "code": { - "type": "integer", - "format": "int64", - "description": "Response code." - }, - "data": { - "type": "string", - "description": "Result bytes, if any." - }, - "raw_log": { - "type": "string", - "description": "The output of the application's logger (raw string). May be\nnon-deterministic." 
- }, - "logs": { - "type": "array", - "items": { - "type": "object", - "properties": { - "msg_index": { - "type": "integer", - "format": "int64" - }, - "log": { - "type": "string" + "header": { + "type": "object", + "properties": { + "version": { + "title": "basic block info", + "type": "object", + "properties": { + "block": { + "type": "string", + "format": "uint64" + }, + "app": { + "type": "string", + "format": "uint64" + } }, - "events": { - "type": "array", - "items": { + "description": "Consensus captures the consensus rules for processing a block in the blockchain,\nincluding all blockchain data structures and the rules of the application's\nstate transition machine." + }, + "chain_id": { + "type": "string" + }, + "height": { + "type": "string", + "format": "int64" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "last_block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { "type": "object", "properties": { - "type": { - "type": "string" + "total": { + "type": "integer", + "format": "int64" }, - "attributes": { - "type": "array", - "items": { - "type": "object", - "properties": { - "key": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "description": "Attribute defines an attribute wrapper where the key and value are\nstrings instead of raw bytes." - } + "hash": { + "type": "string", + "format": "byte" } }, - "description": "StringEvent defines en Event object wrapper where all the attributes\ncontain key/value pairs that are strings instead of raw bytes." - }, - "description": "Events contains a slice of Event objects that were emitted during some\nexecution." - } + "title": "PartsetHeader" + } + }, + "title": "BlockID" }, - "description": "ABCIMessageLog defines a structure containing an indexed tx ABCI message log." - }, - "description": "The output of the application's logger (typed). May be non-deterministic." 
- }, - "info": { - "type": "string", - "description": "Additional information. May be non-deterministic." - }, - "gas_wanted": { - "type": "string", - "format": "int64", - "description": "Amount of gas requested for transaction." - }, - "gas_used": { - "type": "string", - "format": "int64", - "description": "Amount of gas consumed by transaction." - }, - "tx": { - "type": "object", - "properties": { - "type_url": { + "last_commit_hash": { "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
+ "format": "byte", + "title": "hashes of block data" }, - "value": { + "data_hash": { + "type": "string", + "format": "byte" + }, + "validators_hash": { "type": "string", "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." + "title": "hashes from the app output from the prev block" + }, + "next_validators_hash": { + "type": "string", + "format": "byte" + }, + "consensus_hash": { + "type": "string", + "format": "byte" + }, + "app_hash": { + "type": "string", + "format": "byte" + }, + "last_results_hash": { + "type": "string", + "format": "byte" + }, + "evidence_hash": { + "type": "string", + "format": "byte", + "title": "consensus info" + }, + "proposer_address": { + "type": "string", + "format": "byte" } }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will 
yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "description": "Header defines the structure of a block header." }, - "timestamp": { - "type": "string", - "description": "Time of the previous block. For heights > 1, it's the weighted median of\nthe timestamps of the valid votes in the block.LastCommit. For height == 1,\nit's genesis time." - } - }, - "description": "TxResponse defines a structure containing relevant tx data and metadata. The\ntags are stringified and the log is JSON decoded." - } - }, - "description": "GetTxResponse is the response type for the Service.GetTx method." - }, - "cosmos.tx.v1beta1.GetTxsEventResponse": { - "type": "object", - "properties": { - "txs": { - "type": "array", - "items": { - "$ref": "#/definitions/cosmos.tx.v1beta1.Tx" - }, - "description": "txs is the list of queried transactions." - }, - "tx_responses": { - "type": "array", - "items": { - "type": "object", - "properties": { - "height": { - "type": "string", - "format": "int64", - "title": "The block height" - }, - "txhash": { - "type": "string", - "description": "The transaction hash." 
- }, - "codespace": { - "type": "string", - "title": "Namespace for the Code" - }, - "code": { - "type": "integer", - "format": "int64", - "description": "Response code." - }, - "data": { - "type": "string", - "description": "Result bytes, if any." - }, - "raw_log": { - "type": "string", - "description": "The output of the application's logger (raw string). May be\nnon-deterministic." - }, - "logs": { - "type": "array", - "items": { + "commit": { + "type": "object", + "properties": { + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { "type": "object", "properties": { - "msg_index": { - "type": "integer", - "format": "int64" - }, - "log": { - "type": "string" + "hash": { + "type": "string", + "format": "byte" }, - "events": { - "type": "array", - "items": { - "type": "object", - "properties": { - "type": { - "type": "string" - }, - "attributes": { - "type": "array", - "items": { - "type": "object", - "properties": { - "key": { - "type": "string" - }, - "value": { - "type": "string" - } - }, - "description": "Attribute defines an attribute wrapper where the key and value are\nstrings instead of raw bytes." - } - } + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" }, - "description": "StringEvent defines en Event object wrapper where all the attributes\ncontain key/value pairs that are strings instead of raw bytes." 
+ "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "signatures": { + "type": "array", + "items": { + "type": "object", + "properties": { + "block_id_flag": { + "type": "string", + "enum": [ + "BLOCK_ID_FLAG_UNKNOWN", + "BLOCK_ID_FLAG_ABSENT", + "BLOCK_ID_FLAG_COMMIT", + "BLOCK_ID_FLAG_NIL" + ], + "default": "BLOCK_ID_FLAG_UNKNOWN", + "title": "BlockIdFlag indicates which BlcokID the signature is for" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "timestamp": { + "type": "string", + "format": "date-time" }, - "description": "Events contains a slice of Event objects that were emitted during some\nexecution." - } - }, - "description": "ABCIMessageLog defines a structure containing an indexed tx ABCI message log." - }, - "description": "The output of the application's logger (typed). May be non-deterministic." - }, - "info": { - "type": "string", - "description": "Additional information. May be non-deterministic." - }, - "gas_wanted": { - "type": "string", - "format": "int64", - "description": "Amount of gas requested for transaction." - }, - "gas_used": { - "type": "string", - "format": "int64", - "description": "Amount of gas consumed by transaction." + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "CommitSig is a part of the Vote included in a Commit." + } + } }, - "tx": { + "description": "Commit contains the evidence that a block was committed by a set of validators." + } + } + }, + "validator_set": { + "type": "object", + "properties": { + "validators": { + "type": "array", + "items": { "type": "object", "properties": { - "type_url": { + "address": { "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. 
The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + "format": "byte" }, - "value": { + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." 
+ "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" } - }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - }, - "timestamp": { - "type": "string", - "description": "Time of the previous block. For heights > 1, it's the weighted median of\nthe timestamps of the valid votes in the block.LastCommit. For height == 1,\nit's genesis time." + } } }, - "description": "TxResponse defines a structure containing relevant tx data and metadata. The\ntags are stringified and the log is JSON decoded." - }, - "description": "tx_responses is the list of queried TxResponses." - }, - "pagination": { - "description": "pagination defines an pagination for the response.", - "type": "object", - "properties": { - "next_key": { - "type": "string", - "format": "byte", - "title": "next_key is the key to be passed to PageRequest.key to\nquery the next page most efficiently" + "proposer": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } }, - "total": { - "type": "string", - "format": "uint64", - "title": "total is total number of results available if PageRequest.count_total\nwas set, its value is undefined otherwise" - } - } - } - }, - "description": "GetTxsEventResponse is the response type for the Service.TxsByEvents\nRPC method." 
- }, - "cosmos.tx.v1beta1.ModeInfo": { - "type": "object", - "properties": { - "single": { - "title": "single represents a single signer", - "type": "object", - "properties": { - "mode": { - "title": "mode is the signing mode of the single signer", + "total_voting_power": { "type": "string", - "enum": [ - "SIGN_MODE_UNSPECIFIED", - "SIGN_MODE_DIRECT", - "SIGN_MODE_TEXTUAL", - "SIGN_MODE_LEGACY_AMINO_JSON" - ], - "default": "SIGN_MODE_UNSPECIFIED", - "description": "SignMode represents a signing mode with its own security guarantees.\n\n - SIGN_MODE_UNSPECIFIED: SIGN_MODE_UNSPECIFIED specifies an unknown signing mode and will be\nrejected\n - SIGN_MODE_DIRECT: SIGN_MODE_DIRECT specifies a signing mode which uses SignDoc and is\nverified with raw bytes from Tx\n - SIGN_MODE_TEXTUAL: SIGN_MODE_TEXTUAL is a future signing mode that will verify some\nhuman-readable textual representation on top of the binary representation\nfrom SIGN_MODE_DIRECT\n - SIGN_MODE_LEGACY_AMINO_JSON: SIGN_MODE_LEGACY_AMINO_JSON is a backwards compatibility mode which uses\nAmino JSON and will be removed in the future" + "format": "int64" } } - }, - "multi": { - "$ref": "#/definitions/cosmos.tx.v1beta1.ModeInfo.Multi", - "title": "multi represents a nested multisig signer" } - }, - "description": "ModeInfo describes the signing mode of a single or nested multisig signer." 
+ } }, - "cosmos.tx.v1beta1.ModeInfo.Multi": { + "tendermint.types.LightClientAttackEvidence": { "type": "object", "properties": { - "bitarray": { - "title": "bitarray specifies which keys within the multisig are signing", + "conflicting_block": { "type": "object", "properties": { - "extra_bits_stored": { - "type": "integer", - "format": "int64" + "signed_header": { + "type": "object", + "properties": { + "header": { + "type": "object", + "properties": { + "version": { + "title": "basic block info", + "type": "object", + "properties": { + "block": { + "type": "string", + "format": "uint64" + }, + "app": { + "type": "string", + "format": "uint64" + } + }, + "description": "Consensus captures the consensus rules for processing a block in the blockchain,\nincluding all blockchain data structures and the rules of the application's\nstate transition machine." + }, + "chain_id": { + "type": "string" + }, + "height": { + "type": "string", + "format": "int64" + }, + "time": { + "type": "string", + "format": "date-time" + }, + "last_block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "last_commit_hash": { + "type": "string", + "format": "byte", + "title": "hashes of block data" + }, + "data_hash": { + "type": "string", + "format": "byte" + }, + "validators_hash": { + "type": "string", + "format": "byte", + "title": "hashes from the app output from the prev block" + }, + "next_validators_hash": { + "type": "string", + "format": "byte" + }, + "consensus_hash": { + "type": "string", + "format": "byte" + }, + "app_hash": { + "type": "string", + "format": "byte" + }, + "last_results_hash": { + "type": "string", + "format": "byte" + }, + "evidence_hash": { + "type": "string", + 
"format": "byte", + "title": "consensus info" + }, + "proposer_address": { + "type": "string", + "format": "byte" + } + }, + "description": "Header defines the structure of a block header." + }, + "commit": { + "type": "object", + "properties": { + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "signatures": { + "type": "array", + "items": { + "type": "object", + "properties": { + "block_id_flag": { + "type": "string", + "enum": [ + "BLOCK_ID_FLAG_UNKNOWN", + "BLOCK_ID_FLAG_ABSENT", + "BLOCK_ID_FLAG_COMMIT", + "BLOCK_ID_FLAG_NIL" + ], + "default": "BLOCK_ID_FLAG_UNKNOWN", + "title": "BlockIdFlag indicates which BlcokID the signature is for" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "signature": { + "type": "string", + "format": "byte" + } + }, + "description": "CommitSig is a part of the Vote included in a Commit." + } + } + }, + "description": "Commit contains the evidence that a block was committed by a set of validators." 
+ } + } }, - "elems": { - "type": "string", - "format": "byte" + "validator_set": { + "type": "object", + "properties": { + "validators": { + "type": "array", + "items": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + } + }, + "proposer": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } + } + }, + "total_voting_power": { + "type": "string", + "format": "int64" + } + } } - }, - "description": "CompactBitArray is an implementation of a space efficient bit array.\nThis is used to ensure that the encoded data takes up a minimal amount of\nspace after proto encoding.\nThis is not thread safe, and is not intended for concurrent usage." 
+ } }, - "mode_infos": { + "common_height": { + "type": "string", + "format": "int64" + }, + "byzantine_validators": { "type": "array", "items": { - "$ref": "#/definitions/cosmos.tx.v1beta1.ModeInfo" - }, - "title": "mode_infos is the corresponding modes of the signers of the multisig\nwhich could include nested multisig public keys" - } - }, - "title": "Multi is the mode info for a multisig public key" - }, - "cosmos.tx.v1beta1.ModeInfo.Single": { - "type": "object", - "properties": { - "mode": { - "title": "mode is the signing mode of the single signer", - "type": "string", - "enum": [ - "SIGN_MODE_UNSPECIFIED", - "SIGN_MODE_DIRECT", - "SIGN_MODE_TEXTUAL", - "SIGN_MODE_LEGACY_AMINO_JSON" - ], - "default": "SIGN_MODE_UNSPECIFIED", - "description": "SignMode represents a signing mode with its own security guarantees.\n\n - SIGN_MODE_UNSPECIFIED: SIGN_MODE_UNSPECIFIED specifies an unknown signing mode and will be\nrejected\n - SIGN_MODE_DIRECT: SIGN_MODE_DIRECT specifies a signing mode which uses SignDoc and is\nverified with raw bytes from Tx\n - SIGN_MODE_TEXTUAL: SIGN_MODE_TEXTUAL is a future signing mode that will verify some\nhuman-readable textual representation on top of the binary representation\nfrom SIGN_MODE_DIRECT\n - SIGN_MODE_LEGACY_AMINO_JSON: SIGN_MODE_LEGACY_AMINO_JSON is a backwards compatibility mode which uses\nAmino JSON and will be removed in the future" - } - }, - "title": "Single is the mode info for a single signer. It is structured as a message\nto allow for additional fields such as locale for SIGN_MODE_TEXTUAL in the\nfuture" - }, - "cosmos.tx.v1beta1.OrderBy": { - "type": "string", - "enum": [ - "ORDER_BY_UNSPECIFIED", - "ORDER_BY_ASC", - "ORDER_BY_DESC" - ], - "default": "ORDER_BY_UNSPECIFIED", - "description": "- ORDER_BY_UNSPECIFIED: ORDER_BY_UNSPECIFIED specifies an unknown sorting order. 
OrderBy defaults to ASC in this case.\n - ORDER_BY_ASC: ORDER_BY_ASC defines ascending order\n - ORDER_BY_DESC: ORDER_BY_DESC defines descending order", - "title": "OrderBy defines the sorting order" - }, - "cosmos.tx.v1beta1.SignerInfo": { - "type": "object", - "properties": { - "public_key": { - "type": "object", - "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." 
+ "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" + } } - }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + } }, - "mode_info": { - "$ref": "#/definitions/cosmos.tx.v1beta1.ModeInfo", - "title": "mode_info describes the signing mode of the signer and is a nested\nstructure to support nested multisig pubkey's" + "total_voting_power": { + "type": "string", + "format": "int64" }, - "sequence": { + "timestamp": { "type": "string", - "format": "uint64", - "description": "sequence is the sequence of the account, which describes the\nnumber of committed transactions signed by a given address. It is used to\nprevent replay attacks." + "format": "date-time" } }, - "description": "SignerInfo describes the public key and signing mode of a single top-level\nsigner." + "description": "LightClientAttackEvidence contains evidence of a set of validators attempting to mislead a light client." }, - "cosmos.tx.v1beta1.SimulateRequest": { + "tendermint.types.PartSetHeader": { "type": "object", "properties": { - "tx": { - "$ref": "#/definitions/cosmos.tx.v1beta1.Tx", - "description": "tx is the transaction to simulate.\nDeprecated. Send raw tx bytes instead." + "total": { + "type": "integer", + "format": "int64" }, - "tx_bytes": { + "hash": { "type": "string", - "format": "byte", - "description": "tx_bytes is the raw transaction.\n\nSince: cosmos-sdk 0.43" + "format": "byte" } }, - "description": "SimulateRequest is the request type for the Service.Simulate\nRPC method." 
+ "title": "PartsetHeader" }, - "cosmos.tx.v1beta1.SimulateResponse": { + "tendermint.types.SignedHeader": { "type": "object", "properties": { - "gas_info": { - "description": "gas_info is the information about gas used in the simulation.", + "header": { "type": "object", "properties": { - "gas_wanted": { + "version": { + "title": "basic block info", + "type": "object", + "properties": { + "block": { + "type": "string", + "format": "uint64" + }, + "app": { + "type": "string", + "format": "uint64" + } + }, + "description": "Consensus captures the consensus rules for processing a block in the blockchain,\nincluding all blockchain data structures and the rules of the application's\nstate transition machine." + }, + "chain_id": { + "type": "string" + }, + "height": { "type": "string", - "format": "uint64", - "description": "GasWanted is the maximum units of work we allow this tx to perform." + "format": "int64" }, - "gas_used": { + "time": { "type": "string", - "format": "uint64", - "description": "GasUsed is the amount of gas actually consumed." - } - } - }, - "result": { - "description": "result is the result of the simulation.", - "type": "object", - "properties": { - "data": { + "format": "date-time" + }, + "last_block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" + }, + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "last_commit_hash": { "type": "string", "format": "byte", - "description": "Data is any data returned from message or handler execution. It MUST be\nlength prefixed in order to separate data from multiple message executions." + "title": "hashes of block data" }, - "log": { + "data_hash": { "type": "string", - "description": "Log contains the log information from message or handler execution." 
+ "format": "byte" }, - "events": { - "type": "array", - "items": { - "type": "object", - "properties": { - "type": { - "type": "string" - }, - "attributes": { - "type": "array", - "items": { - "type": "object", - "properties": { - "key": { - "type": "string", - "format": "byte" - }, - "value": { - "type": "string", - "format": "byte" - }, - "index": { - "type": "boolean" - } - }, - "description": "EventAttribute is a single key-value pair, associated with an event." - } - } - }, - "description": "Event allows application developers to attach additional information to\nResponseBeginBlock, ResponseEndBlock, ResponseCheckTx and ResponseDeliverTx.\nLater, transactions may be queried using these events." - }, - "description": "Events contains a slice of Event objects that were emitted during message\nor handler execution." + "validators_hash": { + "type": "string", + "format": "byte", + "title": "hashes from the app output from the prev block" + }, + "next_validators_hash": { + "type": "string", + "format": "byte" + }, + "consensus_hash": { + "type": "string", + "format": "byte" + }, + "app_hash": { + "type": "string", + "format": "byte" + }, + "last_results_hash": { + "type": "string", + "format": "byte" + }, + "evidence_hash": { + "type": "string", + "format": "byte", + "title": "consensus info" + }, + "proposer_address": { + "type": "string", + "format": "byte" } - } - } - }, - "description": "SimulateResponse is the response type for the\nService.SimulateRPC method." - }, - "cosmos.tx.v1beta1.Tx": { - "type": "object", - "properties": { - "body": { - "title": "body is the processable content of the transaction", + }, + "description": "Header defines the structure of a block header." 
+ }, + "commit": { "type": "object", "properties": { - "messages": { - "type": "array", - "items": { - "type": "object", - "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." 
- } + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } }, - "description": "messages is a list of messages to be executed. The required signers of\nthose messages define the number and order of elements in AuthInfo's\nsigner_infos and Tx's signatures. Each required signer address is added to\nthe list only the first time it occurs.\nBy convention, the first required signer (usually from the first message)\nis referred to as the primary signer and pays the fee for the whole\ntransaction." - }, - "memo": { - "type": "string", - "description": "memo is any arbitrary note/comment to be added to the transaction.\nWARNING: in clients, any publicly exposed text should not be called memo,\nbut should be called `note` instead (see https://github.com/cosmos/cosmos-sdk/issues/9122)." - }, - "timeout_height": { - "type": "string", - "format": "uint64", - "title": "timeout is the block height after which this transaction will not\nbe processed by the chain" + "title": "BlockID" }, - "extension_options": { + "signatures": { "type": "array", "items": { "type": "object", "properties": { - "type_url": { + "block_id_flag": { "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. 
This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + "enum": [ + "BLOCK_ID_FLAG_UNKNOWN", + "BLOCK_ID_FLAG_ABSENT", + "BLOCK_ID_FLAG_COMMIT", + "BLOCK_ID_FLAG_NIL" + ], + "default": "BLOCK_ID_FLAG_UNKNOWN", + "title": "BlockIdFlag indicates which BlcokID the signature is for" }, - "value": { + "validator_address": { "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." 
- } - }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - }, - "title": "extension_options are arbitrary options that can be added by chains\nwhen the default options are not sufficient. If any of these are present\nand can't be handled, the transaction will be rejected" - }, - "non_critical_extension_options": { - "type": "array", - "items": { - "type": "object", - "properties": { - "type_url": { + "format": "byte" + }, + "timestamp": { "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
+ "format": "date-time" }, - "value": { + "signature": { "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." + "format": "byte" } }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - }, - "title": "extension_options are arbitrary options that can be added by chains\nwhen the default options are not sufficient. If any of these are present\nand can't be handled, they will be ignored" + "description": "CommitSig is a part of the Vote included in a Commit." + } } }, - "description": "TxBody is the body of a transaction that all signers sign over." - }, - "auth_info": { - "$ref": "#/definitions/cosmos.tx.v1beta1.AuthInfo", - "title": "auth_info is the authorization related content of the transaction,\nspecifically signers, signer modes and fee" - }, - "signatures": { - "type": "array", - "items": { - "type": "string", - "format": "byte" - }, - "description": "signatures is a list of signatures that matches the length and order of\nAuthInfo's signer_infos to allow connecting signature meta information like\npublic key and signing mode by position." + "description": "Commit contains the evidence that a block was committed by a set of validators." } - }, - "description": "Tx is the standard type used for broadcasting transactions." 
+ } }, - "cosmos.tx.v1beta1.TxBody": { + "tendermint.types.SignedMsgType": { + "type": "string", + "enum": [ + "SIGNED_MSG_TYPE_UNKNOWN", + "SIGNED_MSG_TYPE_PREVOTE", + "SIGNED_MSG_TYPE_PRECOMMIT", + "SIGNED_MSG_TYPE_PROPOSAL" + ], + "default": "SIGNED_MSG_TYPE_UNKNOWN", + "description": "SignedMsgType is a type of signed message in the consensus.\n\n - SIGNED_MSG_TYPE_PREVOTE: Votes\n - SIGNED_MSG_TYPE_PROPOSAL: Proposals" + }, + "tendermint.types.Validator": { "type": "object", "properties": { - "messages": { - "type": "array", - "items": { - "type": "object", - "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
- }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." - } - }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - }, - "description": "messages is a list of messages to be executed. The required signers of\nthose messages define the number and order of elements in AuthInfo's\nsigner_infos and Tx's signatures. Each required signer address is added to\nthe list only the first time it occurs.\nBy convention, the first required signer (usually from the first message)\nis referred to as the primary signer and pays the fee for the whole\ntransaction." - }, - "memo": { - "type": "string", - "description": "memo is any arbitrary note/comment to be added to the transaction.\nWARNING: in clients, any publicly exposed text should not be called memo,\nbut should be called `note` instead (see https://github.com/cosmos/cosmos-sdk/issues/9122)." - }, - "timeout_height": { + "address": { "type": "string", - "format": "uint64", - "title": "timeout is the block height after which this transaction will not\nbe processed by the chain" + "format": "byte" }, - "extension_options": { - "type": "array", - "items": { - "type": "object", - "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). 
The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." 
- } + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "secp256k1": { + "type": "string", + "format": "byte" + } }, - "title": "extension_options are arbitrary options that can be added by chains\nwhen the default options are not sufficient. If any of these are present\nand can't be handled, the transaction will be rejected" + "title": "PublicKey defines the keys available for use with Validators" }, - "non_critical_extension_options": { - "type": "array", - "items": { - "type": "object", - "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. 
(Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." - } - }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - }, - "title": "extension_options are arbitrary options that can be added by chains\nwhen the default options are not sufficient. If any of these are present\nand can't be handled, they will be ignored" - } - }, - "description": "TxBody is the body of a transaction that all signers sign over." - }, - "google.protobuf.Any": { - "type": "object", - "properties": { - "type_url": { + "voting_power": { "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. 
Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + "format": "int64" }, - "value": { + "proposer_priority": { "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." + "format": "int64" } - }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded 
message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + } }, - "grpc.gateway.runtime.Error": { + "tendermint.types.ValidatorSet": { "type": "object", "properties": { - "error": { - "type": "string" - }, - "code": { - "type": "integer", - "format": "int32" - }, - "message": { - "type": "string" - }, - "details": { + "validators": { "type": "array", "items": { "type": "object", "properties": { - "type_url": { + "address": { "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. 
Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + "format": "byte" }, - "value": { + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." + "format": "int64" + }, + "proposer_priority": { + "type": "string", + "format": "int64" } + } + } + }, + "proposer": { + "type": "object", + "properties": { + "address": { + "type": "string", + "format": "byte" + }, + "pub_key": { + "type": "object", + "properties": { + "ed25519": { + "type": "string", + "format": "byte" + }, + "secp256k1": { + "type": "string", + "format": "byte" + } + }, + "title": "PublicKey defines the keys available for use with Validators" + }, + "voting_power": { + "type": "string", + "format": "int64" }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n 
Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := &pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": ,\n \"lastName\": \n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + "proposer_priority": { + "type": "string", + "format": "int64" + } } + }, + "total_voting_power": { + "type": "string", + "format": "int64" } } }, - "tendermint.abci.Event": { + "tendermint.types.Vote": { "type": "object", "properties": { "type": { - "type": "string" + "type": "string", + "enum": [ + "SIGNED_MSG_TYPE_UNKNOWN", + "SIGNED_MSG_TYPE_PREVOTE", + "SIGNED_MSG_TYPE_PRECOMMIT", + "SIGNED_MSG_TYPE_PROPOSAL" + ], + "default": "SIGNED_MSG_TYPE_UNKNOWN", + "description": "SignedMsgType is a type of signed message in the consensus.\n\n - SIGNED_MSG_TYPE_PREVOTE: Votes\n - SIGNED_MSG_TYPE_PROPOSAL: Proposals" }, - "attributes": { - "type": "array", - "items": { - "type": "object", - "properties": { - "key": { - "type": "string", - "format": "byte" - }, - "value": { - "type": "string", - "format": "byte" - }, - "index": { - "type": "boolean" - } + "height": { + "type": "string", + "format": "int64" + }, + "round": { + "type": "integer", + "format": "int32" + }, + "block_id": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "format": "byte" }, - "description": "EventAttribute is a single key-value pair, associated with an event." 
- } + "part_set_header": { + "type": "object", + "properties": { + "total": { + "type": "integer", + "format": "int64" + }, + "hash": { + "type": "string", + "format": "byte" + } + }, + "title": "PartsetHeader" + } + }, + "title": "BlockID" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "validator_address": { + "type": "string", + "format": "byte" + }, + "validator_index": { + "type": "integer", + "format": "int32" + }, + "signature": { + "type": "string", + "format": "byte" } }, - "description": "Event allows application developers to attach additional information to\nResponseBeginBlock, ResponseEndBlock, ResponseCheckTx and ResponseDeliverTx.\nLater, transactions may be queried using these events." + "description": "Vote represents a prevote, precommit, or commit vote from validators for\nconsensus." }, - "tendermint.abci.EventAttribute": { + "tendermint.version.Consensus": { "type": "object", "properties": { - "key": { + "block": { "type": "string", - "format": "byte" + "format": "uint64" }, - "value": { + "app": { "type": "string", - "format": "byte" - }, - "index": { - "type": "boolean" + "format": "uint64" } }, - "description": "EventAttribute is a single key-value pair, associated with an event." + "description": "Consensus captures the consensus rules for processing a block in the blockchain,\nincluding all blockchain data structures and the rules of the application's\nstate transition machine." } } } \ No newline at end of file diff --git a/docs/static/openapi.yml b/docs/static/openapi.yml index 9b2801f70..8bdd51dfd 100644 --- a/docs/static/openapi.yml +++ b/docs/static/openapi.yml @@ -9,8 +9,8 @@ info: description: >- A REST interface for state queries. - Tendermint RPC endpoints are available here: - https://docs.tendermint.com/v0.34/rpc/. + CometBFT RPC endpoints are available here: + https://docs.cometbft.com/v0.37/rpc/. 
paths: /dcl/compliance/certified-models: get: @@ -46,9 +46,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -72,8 +73,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -84,11 +83,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: pagination.key description: |- @@ -179,8 +176,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -191,11 +186,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: vid in: path @@ -306,9 +299,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. 
total: type: string format: uint64 @@ -332,8 +326,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -344,11 +336,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: pagination.key description: |- @@ -493,8 +483,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -505,11 +493,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: vid in: path @@ -627,9 +613,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -653,8 +640,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -665,11 +650,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: pagination.key description: |- @@ -821,8 +804,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -833,11 +814,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: cDCertificateId in: path @@ -879,9 +858,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. 
total: type: string format: uint64 @@ -905,8 +885,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -917,11 +895,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: pagination.key description: |- @@ -1012,8 +988,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -1024,11 +998,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: vid in: path @@ -1085,9 +1057,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -1111,8 +1084,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -1123,11 +1094,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: pagination.key description: |- @@ -1218,8 +1187,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -1230,11 +1197,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: vid in: path @@ -1280,7 +1245,7 @@ paths: pub_key: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the @@ -1331,19 +1296,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. 
As of May 2023, there are + no widely used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of - the above specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -1377,6 +1340,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -1391,10 +1358,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -1413,7 +1383,6 @@ paths: name "y.z". - JSON ==== @@ -1488,6 +1457,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string vendorID: @@ -1503,6 +1475,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string productIDs: @@ -1522,9 +1497,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. 
total: type: string format: uint64 @@ -1548,8 +1524,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -1560,7 +1534,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -1611,19 +1585,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -1656,6 +1628,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -1670,10 +1646,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -1692,7 +1671,6 @@ paths: name "y.z". - JSON ==== @@ -1812,8 +1790,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -1824,7 +1800,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -1875,19 +1851,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. 
As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -1920,6 +1894,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -1934,10 +1912,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -1956,7 +1937,6 @@ paths: name "y.z". - JSON ==== @@ -2019,7 +1999,7 @@ paths: pub_key: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the @@ -2070,19 +2050,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no + widely used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the - above specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -2115,6 +2093,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... 
+ if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -2129,10 +2111,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -2151,7 +2136,6 @@ paths: name "y.z". - JSON ==== @@ -2226,6 +2210,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string vendorID: @@ -2241,6 +2228,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string productIDs: @@ -2259,8 +2249,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -2271,7 +2259,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -2322,19 +2310,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -2367,6 +2353,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. 
@@ -2381,10 +2371,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -2403,7 +2396,6 @@ paths: name "y.z". - JSON ==== @@ -2476,7 +2468,7 @@ paths: pub_key: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies @@ -2528,19 +2520,18 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there + are no widely used type server + + implementations and no plans to implement + one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer - of the above specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -2574,6 +2565,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -2588,10 +2583,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -2610,7 +2608,6 @@ paths: name "y.z". 
- JSON ==== @@ -2685,6 +2682,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string vendorID: @@ -2700,6 +2700,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string productIDs: @@ -2713,15 +2716,20 @@ paths: max: type: integer format: int32 + title: |- + TODO issue 99: do we need that ??? + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = false; pagination: type: object properties: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -2745,8 +2753,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -2757,7 +2763,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -2808,19 +2814,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -2853,6 +2857,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... 
+ if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -2867,10 +2875,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -2889,7 +2900,6 @@ paths: name "y.z". - JSON ==== @@ -3012,7 +3022,7 @@ paths: pub_key: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies @@ -3064,19 +3074,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are + no widely used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of - the above specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -3110,6 +3118,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -3124,10 +3136,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -3146,7 +3161,6 @@ paths: name "y.z". 
- JSON ==== @@ -3221,6 +3235,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string vendorID: @@ -3236,6 +3253,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string productIDs: @@ -3249,13 +3269,15 @@ paths: max: type: integer format: int32 + title: |- + TODO issue 99: do we need that ??? + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = false; default: description: An unexpected error response. schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -3266,7 +3288,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -3317,19 +3339,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -3362,6 +3382,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -3376,10 +3400,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... 
foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -3398,7 +3425,6 @@ paths: name "y.z". - JSON ==== @@ -3472,6 +3498,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string pagination: @@ -3480,9 +3509,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -3506,8 +3536,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -3518,7 +3546,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -3569,19 +3597,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -3614,6 +3640,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -3628,10 +3658,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... 
foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -3650,7 +3683,6 @@ paths: name "y.z". - JSON ==== @@ -3774,6 +3806,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string default: @@ -3781,8 +3816,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -3793,7 +3826,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -3844,19 +3877,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -3889,6 +3920,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -3903,10 +3938,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -3925,7 +3963,6 @@ paths: name "y.z". 
- JSON ==== @@ -3998,7 +4035,7 @@ paths: pub_key: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies @@ -4050,19 +4087,18 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there + are no widely used type server + + implementations and no plans to implement + one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer - of the above specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -4096,6 +4132,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -4110,10 +4150,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -4132,7 +4175,6 @@ paths: name "y.z". - JSON ==== @@ -4207,6 +4249,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string vendorID: @@ -4222,6 +4267,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string productIDs: @@ -4241,9 +4289,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. 
It will be empty if + there are no more results. total: type: string format: uint64 @@ -4267,8 +4316,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -4279,7 +4326,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -4330,19 +4377,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -4375,6 +4420,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -4389,10 +4438,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -4411,7 +4463,6 @@ paths: name "y.z". - JSON ==== @@ -4534,7 +4585,7 @@ paths: pub_key: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies @@ -4586,19 +4637,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. 
As of May 2023, there are + no widely used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of - the above specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -4632,6 +4681,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -4646,10 +4699,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -4668,7 +4724,6 @@ paths: name "y.z". - JSON ==== @@ -4743,6 +4798,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string vendorID: @@ -4758,6 +4816,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string productIDs: @@ -4776,8 +4837,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -4788,7 +4847,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -4839,19 +4898,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. 
Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -4884,6 +4941,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -4898,10 +4959,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -4920,7 +4984,6 @@ paths: name "y.z". - JSON ==== @@ -4993,7 +5056,7 @@ paths: pub_key: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies @@ -5045,19 +5108,18 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there + are no widely used type server + + implementations and no plans to implement + one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer - of the above specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -5091,6 +5153,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... 
+ if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -5105,10 +5171,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -5127,7 +5196,6 @@ paths: name "y.z". - JSON ==== @@ -5202,6 +5270,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string vendorID: @@ -5217,6 +5288,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string productIDs: @@ -5240,6 +5314,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string reason: @@ -5254,9 +5331,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -5280,8 +5358,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -5292,7 +5368,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -5343,19 +5419,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. 
- value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -5388,6 +5462,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -5402,10 +5480,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -5424,7 +5505,6 @@ paths: name "y.z". - JSON ==== @@ -5547,7 +5627,7 @@ paths: pub_key: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies @@ -5599,19 +5679,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are + no widely used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of - the above specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -5645,6 +5723,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. 
@@ -5659,10 +5741,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -5681,7 +5766,6 @@ paths: name "y.z". - JSON ==== @@ -5756,6 +5840,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string vendorID: @@ -5771,6 +5858,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string productIDs: @@ -5794,6 +5884,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string reason: @@ -5807,8 +5900,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -5819,7 +5910,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -5870,19 +5961,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -5915,6 +6004,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... 
+ if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -5929,10 +6022,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -5951,7 +6047,6 @@ paths: name "y.z". - JSON ==== @@ -6051,9 +6146,7 @@ paths: height: type: string format: int64 - description: |- - The height at which the upgrade must be performed. - Only used if Time is not set. + description: The height at which the upgrade must be performed. info: type: string title: >- @@ -6072,7 +6165,7 @@ paths: If this field is not empty, an error will be thrown. type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the @@ -6123,19 +6216,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are + no widely used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of - the above specified type. + additionalProperties: {} description: >- Plan specifies information about a planned upgrade and when it should occur. 
@@ -6151,6 +6242,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string rejects: @@ -6163,6 +6257,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string pagination: @@ -6171,9 +6268,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -6197,8 +6295,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -6209,7 +6305,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -6260,19 +6356,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -6305,6 +6399,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -6319,10 +6417,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... 
foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -6341,7 +6442,6 @@ paths: name "y.z". - JSON ==== @@ -6491,9 +6591,7 @@ paths: height: type: string format: int64 - description: |- - The height at which the upgrade must be performed. - Only used if Time is not set. + description: The height at which the upgrade must be performed. info: type: string title: >- @@ -6512,7 +6610,7 @@ paths: If this field is not empty, an error will be thrown. type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the @@ -6563,19 +6661,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no + widely used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the - above specified type. + additionalProperties: {} description: >- Plan specifies information about a planned upgrade and when it should occur. @@ -6591,6 +6687,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string rejects: @@ -6603,6 +6702,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string default: @@ -6610,8 +6712,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -6622,7 +6722,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -6673,19 +6773,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. 
+ type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -6718,6 +6816,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -6732,10 +6834,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -6754,7 +6859,6 @@ paths: name "y.z". - JSON ==== @@ -6854,9 +6958,7 @@ paths: height: type: string format: int64 - description: |- - The height at which the upgrade must be performed. - Only used if Time is not set. + description: The height at which the upgrade must be performed. info: type: string title: >- @@ -6875,7 +6977,7 @@ paths: If this field is not empty, an error will be thrown. type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the @@ -6926,19 +7028,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are + no widely used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. 
- value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of - the above specified type. + additionalProperties: {} description: >- Plan specifies information about a planned upgrade and when it should occur. @@ -6954,6 +7054,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string rejects: @@ -6966,6 +7069,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string pagination: @@ -6974,9 +7080,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -7000,8 +7107,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -7012,7 +7117,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -7063,19 +7168,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -7108,6 +7211,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... 
+ if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -7122,10 +7229,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -7144,7 +7254,6 @@ paths: name "y.z". - JSON ==== @@ -7294,9 +7403,7 @@ paths: height: type: string format: int64 - description: |- - The height at which the upgrade must be performed. - Only used if Time is not set. + description: The height at which the upgrade must be performed. info: type: string title: >- @@ -7315,7 +7422,7 @@ paths: If this field is not empty, an error will be thrown. type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the @@ -7366,19 +7473,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no + widely used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the - above specified type. + additionalProperties: {} description: >- Plan specifies information about a planned upgrade and when it should occur. 
@@ -7394,6 +7499,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string rejects: @@ -7406,6 +7514,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string default: @@ -7413,8 +7524,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -7425,7 +7534,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -7476,19 +7585,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -7521,6 +7628,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -7535,10 +7646,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -7557,7 +7671,6 @@ paths: name "y.z". - JSON ==== @@ -7657,9 +7770,7 @@ paths: height: type: string format: int64 - description: |- - The height at which the upgrade must be performed. 
- Only used if Time is not set. + description: The height at which the upgrade must be performed. info: type: string title: >- @@ -7678,7 +7789,7 @@ paths: If this field is not empty, an error will be thrown. type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the @@ -7729,19 +7840,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are + no widely used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of - the above specified type. + additionalProperties: {} description: >- Plan specifies information about a planned upgrade and when it should occur. @@ -7757,6 +7866,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string rejects: @@ -7769,6 +7881,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string pagination: @@ -7777,9 +7892,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -7803,8 +7919,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -7815,7 +7929,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -7866,19 +7980,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. 
+ type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -7911,6 +8023,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -7925,10 +8041,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -7947,7 +8066,6 @@ paths: name "y.z". - JSON ==== @@ -8097,9 +8215,7 @@ paths: height: type: string format: int64 - description: |- - The height at which the upgrade must be performed. - Only used if Time is not set. + description: The height at which the upgrade must be performed. info: type: string title: >- @@ -8118,7 +8234,7 @@ paths: If this field is not empty, an error will be thrown. type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the @@ -8169,19 +8285,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no + widely used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. 
- value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the - above specified type. + additionalProperties: {} description: >- Plan specifies information about a planned upgrade and when it should occur. @@ -8197,6 +8311,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string rejects: @@ -8209,6 +8326,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string default: @@ -8216,8 +8336,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -8228,7 +8346,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -8279,19 +8397,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -8324,6 +8440,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -8338,10 +8458,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... 
foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -8360,7 +8483,6 @@ paths: name "y.z". - JSON ==== @@ -8476,9 +8598,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -8502,8 +8625,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -8514,11 +8635,9 @@ paths: items: type: object properties: - type_url: + '@type': type: string - value: - type: string - format: byte + additionalProperties: {} parameters: - name: pagination.key description: |- @@ -8611,8 +8730,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -8623,11 +8740,9 @@ paths: items: type: object properties: - type_url: + '@type': type: string - value: - type: string - format: byte + additionalProperties: {} parameters: - name: vid in: path @@ -8702,8 +8817,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -8714,11 +8827,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: vid in: path @@ -8761,8 +8872,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -8773,11 +8882,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: vid in: path @@ -8850,8 +8957,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -8862,11 +8967,9 @@ paths: items: type: object properties: - type_url: + '@type': type: string - 
value: - type: string - format: byte + additionalProperties: {} parameters: - name: vid in: path @@ -8939,6 +9042,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January + 1, 1970 UTC info: type: string subjectAsText: @@ -8953,6 +9059,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January + 1, 1970 UTC info: type: string vid: @@ -8972,9 +9081,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -8998,8 +9108,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -9010,11 +9118,9 @@ paths: items: type: object properties: - type_url: + '@type': type: string - value: - type: string - format: byte + additionalProperties: {} parameters: - name: pagination.key description: |- @@ -9102,8 +9208,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -9114,11 +9218,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: subject in: path @@ -9178,6 +9280,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string subjectAsText: @@ -9192,6 +9297,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string vid: @@ -9210,8 +9318,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -9222,11 +9328,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: 
{} parameters: - name: subject in: path @@ -9269,8 +9373,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -9281,11 +9383,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: issuer in: path @@ -9350,6 +9450,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January + 1, 1970 UTC info: type: string subjectAsText: @@ -9364,6 +9467,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January + 1, 1970 UTC info: type: string vid: @@ -9380,9 +9486,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -9406,8 +9513,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -9418,11 +9523,9 @@ paths: items: type: object properties: - type_url: + '@type': type: string - value: - type: string - format: byte + additionalProperties: {} parameters: - name: pagination.key description: |- @@ -9533,6 +9636,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string subjectAsText: @@ -9547,6 +9653,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string vid: @@ -9562,8 +9671,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -9574,11 +9681,9 @@ paths: items: type: object properties: - type_url: + '@type': type: string - value: - type: string - format: byte + additionalProperties: {} parameters: - name: vid in: path @@ -9640,6 +9745,9 @@ 
paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January + 1, 1970 UTC info: type: string subjectAsText: @@ -9654,6 +9762,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January + 1, 1970 UTC info: type: string vid: @@ -9670,9 +9781,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -9696,8 +9808,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -9708,11 +9818,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: pagination.key description: |- @@ -9823,6 +9931,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string subjectAsText: @@ -9837,6 +9948,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string vid: @@ -9852,8 +9966,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -9864,11 +9976,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: vid in: path @@ -10026,6 +10136,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string subjectAsText: @@ -10040,6 +10153,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string vid: @@ -10057,9 +10173,10 @@ paths: next_key: type: string format: byte - 
title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -10083,8 +10200,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -10095,11 +10210,9 @@ paths: items: type: object properties: - type_url: + '@type': type: string - value: - type: string - format: byte + additionalProperties: {} parameters: - name: pagination.key description: |- @@ -10192,6 +10305,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string subjectAsText: @@ -10206,6 +10322,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string vid: @@ -10222,8 +10341,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -10234,11 +10351,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: subject in: path @@ -10279,6 +10394,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string subjectAsText: @@ -10296,9 +10414,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. 
total: type: string format: uint64 @@ -10322,8 +10441,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -10334,11 +10451,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: pagination.key description: |- @@ -10425,6 +10540,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string subjectAsText: @@ -10441,8 +10559,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -10453,11 +10569,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: subject in: path @@ -10527,6 +10641,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January + 1, 1970 UTC info: type: string subjectAsText: @@ -10541,6 +10658,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January + 1, 1970 UTC info: type: string vid: @@ -10560,9 +10680,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. 
total: type: string format: uint64 @@ -10586,8 +10707,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -10598,11 +10717,9 @@ paths: items: type: object properties: - type_url: + '@type': type: string - value: - type: string - format: byte + additionalProperties: {} parameters: - name: pagination.key description: |- @@ -10714,6 +10831,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string subjectAsText: @@ -10728,6 +10848,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string vid: @@ -10746,8 +10869,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -10758,11 +10879,9 @@ paths: items: type: object properties: - type_url: + '@type': type: string - value: - type: string - format: byte + additionalProperties: {} parameters: - name: subject in: path @@ -10827,9 +10946,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. 
total: type: string format: uint64 @@ -10853,8 +10973,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -10865,11 +10983,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: pagination.key description: |- @@ -10986,8 +11102,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -10998,11 +11112,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: issuerSubjectKeyID in: path @@ -11060,8 +11172,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -11072,11 +11182,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: issuerSubjectKeyID in: path @@ -11147,6 +11255,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January + 1, 1970 UTC info: type: string subjectAsText: @@ -11161,6 +11272,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January + 1, 1970 UTC info: type: string vid: @@ -11180,9 +11294,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. 
total: type: string format: uint64 @@ -11206,8 +11321,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -11218,11 +11331,9 @@ paths: items: type: object properties: - type_url: + '@type': type: string - value: - type: string - format: byte + additionalProperties: {} parameters: - name: pagination.key description: |- @@ -11334,6 +11445,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string subjectAsText: @@ -11348,6 +11462,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string vid: @@ -11366,8 +11483,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -11378,11 +11493,9 @@ paths: items: type: object properties: - type_url: + '@type': type: string - value: - type: string - format: byte + additionalProperties: {} parameters: - name: subject in: path @@ -11448,6 +11561,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January + 1, 1970 UTC info: type: string subjectAsText: @@ -11462,6 +11578,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January + 1, 1970 UTC info: type: string vid: @@ -11478,9 +11597,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. 
total: type: string format: uint64 @@ -11504,8 +11624,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -11516,11 +11634,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: pagination.key description: |- @@ -11632,6 +11748,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string subjectAsText: @@ -11646,6 +11765,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string vid: @@ -11661,8 +11783,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -11673,11 +11793,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: subject in: path @@ -11716,8 +11834,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -11728,11 +11844,9 @@ paths: items: type: object properties: - type_url: + '@type': type: string - value: - type: string - format: byte + additionalProperties: {} tags: - Query /dcl/pki/root-certificates: @@ -11762,8 +11876,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -11774,11 +11886,9 @@ paths: items: type: object properties: - type_url: + '@type': type: string - value: - type: string - format: byte + additionalProperties: {} tags: - Query /dcl/validator/disabled-nodes: @@ -11810,6 +11920,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string disabledByNodeAdmin: @@ -11824,6 +11937,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 
UTC info: type: string pagination: @@ -11832,9 +11948,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -11858,8 +11975,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -11870,7 +11985,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -11921,19 +12036,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -11966,6 +12079,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -11980,10 +12097,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -12002,7 +12122,6 @@ paths: name "y.z". 
- JSON ==== @@ -12128,6 +12247,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string disabledByNodeAdmin: @@ -12142,6 +12264,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string default: @@ -12149,8 +12274,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -12161,7 +12284,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -12212,19 +12335,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -12257,6 +12378,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -12271,10 +12396,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -12293,7 +12421,6 @@ paths: name "y.z". 
- JSON ==== @@ -12366,9 +12493,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -12392,8 +12520,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -12404,7 +12530,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -12455,19 +12581,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -12500,6 +12624,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -12514,10 +12642,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -12536,7 +12667,6 @@ paths: name "y.z". 
- JSON ==== @@ -12658,8 +12788,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -12670,7 +12798,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -12721,19 +12849,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -12766,6 +12892,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -12780,10 +12910,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -12802,7 +12935,6 @@ paths: name "y.z". - JSON ==== @@ -12884,9 +13016,10 @@ paths: type: string description: optional details. pubKey: + title: the consensus public key of the tendermint validator type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the @@ -12937,19 +13070,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. 
As of May 2023, there are no + widely used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the - above specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -12982,6 +13113,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -12996,10 +13131,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -13018,7 +13156,6 @@ paths: name "y.z". - JSON ==== @@ -13060,7 +13197,6 @@ paths: "@type": "type.googleapis.com/google.protobuf.Duration", "value": "1.212s" } - title: the consensus public key of the tendermint validator power: type: integer format: int32 @@ -13077,9 +13213,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. 
total: type: string format: uint64 @@ -13103,8 +13240,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -13115,7 +13250,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -13166,19 +13301,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -13211,6 +13344,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -13225,10 +13362,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -13247,7 +13387,6 @@ paths: name "y.z". - JSON ==== @@ -13379,9 +13518,10 @@ paths: type: string description: optional details. pubKey: + title: the consensus public key of the tendermint validator type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type @@ -13432,19 +13572,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. 
As of May 2023, there are no + widely used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the - above specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -13477,6 +13615,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -13491,10 +13633,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -13513,7 +13658,6 @@ paths: name "y.z". - JSON ==== @@ -13553,7 +13697,6 @@ paths: "@type": "type.googleapis.com/google.protobuf.Duration", "value": "1.212s" } - title: the consensus public key of the tendermint validator power: type: integer format: int32 @@ -13569,8 +13712,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -13581,7 +13722,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -13632,19 +13773,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. 
- value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -13677,6 +13816,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -13691,10 +13834,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -13713,7 +13859,6 @@ paths: name "y.z". - JSON ==== @@ -13789,6 +13934,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string rejects: @@ -13801,6 +13949,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string pagination: @@ -13809,9 +13960,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -13835,8 +13987,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -13847,7 +13997,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -13898,19 +14048,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. 
As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -13943,6 +14091,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -13957,10 +14109,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -13979,7 +14134,6 @@ paths: name "y.z". - JSON ==== @@ -14105,6 +14259,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string rejects: @@ -14117,6 +14274,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string default: @@ -14124,8 +14284,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -14136,7 +14294,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -14187,19 +14345,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. 
Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -14232,6 +14388,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -14246,10 +14406,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -14268,7 +14431,6 @@ paths: name "y.z". - JSON ==== @@ -14344,6 +14506,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string rejects: @@ -14356,6 +14521,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, + 1970 UTC info: type: string pagination: @@ -14364,9 +14532,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. 
total: type: string format: uint64 @@ -14390,8 +14559,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -14402,7 +14569,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -14453,19 +14620,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -14498,6 +14663,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -14512,10 +14681,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -14534,7 +14706,6 @@ paths: name "y.z". 
- JSON ==== @@ -14660,6 +14831,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string rejects: @@ -14672,6 +14846,9 @@ paths: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string default: @@ -14679,8 +14856,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -14691,7 +14866,7 @@ paths: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -14742,19 +14917,17 @@ paths: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -14787,6 +14960,10 @@ paths: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -14801,10 +14978,13 @@ paths: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -14823,7 +15003,6 @@ paths: name "y.z". 
- JSON ==== @@ -14907,9 +15086,10 @@ paths: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -14933,8 +15113,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -14945,11 +15123,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: pagination.key description: |- @@ -15043,8 +15219,6 @@ paths: schema: type: object properties: - error: - type: string code: type: integer format: int32 @@ -15055,11 +15229,9 @@ paths: items: type: object properties: - type_url: - type: string - value: + '@type': type: string - format: byte + additionalProperties: {} parameters: - name: vendorID in: path @@ -15128,9 +15300,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -15148,7 +15321,7 @@ definitions: google.protobuf.Any: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of the @@ -15191,18 +15364,16 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely used type + server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. 
- value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above specified - type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -15233,6 +15404,10 @@ definitions: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -15247,10 +15422,13 @@ definitions: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -15265,7 +15443,6 @@ definitions: name "y.z". - JSON ==== @@ -15300,11 +15477,9 @@ definitions: "@type": "type.googleapis.com/google.protobuf.Duration", "value": "1.212s" } - grpc.gateway.runtime.Error: + google.rpc.Status: type: object properties: - error: - type: string code: type: integer format: int32 @@ -15315,7 +15490,7 @@ definitions: items: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of the @@ -15362,19 +15537,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely used + type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. 
+ additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -15407,6 +15580,10 @@ definitions: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -15421,10 +15598,13 @@ definitions: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -15439,7 +15619,6 @@ definitions: name "y.z". - JSON ==== @@ -15693,9 +15872,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -15793,9 +15973,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -15900,9 +16081,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -15946,9 +16128,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. 
It will be empty if + there are no more results. total: type: string format: uint64 @@ -15992,9 +16175,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -16245,7 +16429,7 @@ definitions: pub_key: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of the @@ -16291,18 +16475,16 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely used type + server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above specified - type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -16334,6 +16516,10 @@ definitions: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -16348,10 +16534,13 @@ definitions: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -16366,7 +16555,6 @@ definitions: name "y.z". 
- JSON ==== @@ -16435,7 +16623,7 @@ definitions: pub_key: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of the @@ -16483,19 +16671,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely used + type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -16528,6 +16714,10 @@ definitions: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -16542,10 +16732,13 @@ definitions: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -16561,7 +16754,6 @@ definitions: name "y.z". 
- JSON ==== @@ -16630,6 +16822,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string vendorID: @@ -16645,6 +16838,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string productIDs: @@ -16672,6 +16866,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string zigbeealliance.distributedcomplianceledger.dclauth.PendingAccount: @@ -16688,7 +16883,7 @@ definitions: pub_key: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -16738,19 +16933,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -16783,6 +16976,10 @@ definitions: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -16797,10 +16994,13 @@ definitions: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -16819,7 +17019,6 @@ definitions: name "y.z". 
- JSON ==== @@ -16889,6 +17088,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string vendorID: @@ -16904,6 +17104,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string productIDs: @@ -16917,6 +17118,10 @@ definitions: max: type: integer format: int32 + title: |- + TODO issue 99: do we need that ??? + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = false; zigbeealliance.distributedcomplianceledger.dclauth.PendingAccountRevocation: type: object properties: @@ -16932,6 +17137,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string zigbeealliance.distributedcomplianceledger.dclauth.QueryAllAccountResponse: @@ -16950,7 +17156,7 @@ definitions: pub_key: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -17001,19 +17207,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -17046,6 +17250,10 @@ definitions: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. 
@@ -17060,10 +17268,13 @@ definitions: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -17082,7 +17293,6 @@ definitions: name "y.z". - JSON ==== @@ -17155,6 +17365,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string vendorID: @@ -17170,6 +17381,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string productIDs: @@ -17189,9 +17401,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -17227,7 +17440,7 @@ definitions: pub_key: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the @@ -17278,19 +17491,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no + widely used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the - above specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -17323,6 +17534,10 @@ definitions: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... 
+ if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -17337,10 +17552,13 @@ definitions: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -17359,7 +17577,6 @@ definitions: name "y.z". - JSON ==== @@ -17434,6 +17651,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string vendorID: @@ -17449,6 +17669,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string productIDs: @@ -17462,15 +17685,20 @@ definitions: max: type: integer format: int32 + title: |- + TODO issue 99: do we need that ??? + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = false; pagination: type: object properties: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -17507,6 +17735,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string pagination: @@ -17515,9 +17744,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. 
total: type: string format: uint64 @@ -17553,7 +17783,7 @@ definitions: pub_key: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the @@ -17604,19 +17834,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no + widely used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the - above specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -17649,6 +17877,10 @@ definitions: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -17663,10 +17895,13 @@ definitions: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -17685,7 +17920,6 @@ definitions: name "y.z". 
- JSON ==== @@ -17760,6 +17994,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string vendorID: @@ -17775,6 +18012,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string productIDs: @@ -17794,9 +18034,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -17832,7 +18073,7 @@ definitions: pub_key: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the @@ -17883,19 +18124,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no + widely used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the - above specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -17928,6 +18167,10 @@ definitions: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -17942,10 +18185,13 @@ definitions: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... 
foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -17964,7 +18210,6 @@ definitions: name "y.z". - JSON ==== @@ -18039,6 +18284,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string vendorID: @@ -18054,6 +18302,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string productIDs: @@ -18077,6 +18328,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string reason: @@ -18091,9 +18343,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -18124,7 +18377,7 @@ definitions: pub_key: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -18174,19 +18427,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -18219,6 +18470,10 @@ definitions: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... 
+ if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -18233,10 +18488,13 @@ definitions: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -18255,7 +18513,6 @@ definitions: name "y.z". - JSON ==== @@ -18325,6 +18582,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string vendorID: @@ -18340,6 +18598,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string productIDs: @@ -18379,7 +18638,7 @@ definitions: pub_key: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type @@ -18430,19 +18689,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no + widely used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the - above specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -18475,6 +18732,10 @@ definitions: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. 
@@ -18489,10 +18750,13 @@ definitions: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -18511,7 +18775,6 @@ definitions: name "y.z". - JSON ==== @@ -18584,6 +18847,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string vendorID: @@ -18599,6 +18863,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string productIDs: @@ -18612,6 +18877,10 @@ definitions: max: type: integer format: int32 + title: |- + TODO issue 99: do we need that ??? + option (gogoproto.goproto_getters) = false; + option (gogoproto.goproto_stringer) = false; zigbeealliance.distributedcomplianceledger.dclauth.QueryGetPendingAccountRevocationResponse: type: object properties: @@ -18630,6 +18899,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string zigbeealliance.distributedcomplianceledger.dclauth.QueryGetRejectedAccountResponse: @@ -18649,7 +18919,7 @@ definitions: pub_key: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type @@ -18700,19 +18970,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no + widely used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the - above specified type. 
+ additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -18745,6 +19013,10 @@ definitions: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -18759,10 +19031,13 @@ definitions: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -18781,7 +19056,6 @@ definitions: name "y.z". - JSON ==== @@ -18854,6 +19128,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string vendorID: @@ -18869,6 +19144,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string productIDs: @@ -18899,7 +19175,7 @@ definitions: pub_key: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type @@ -18950,19 +19226,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no + widely used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the - above specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -18995,6 +19269,10 @@ definitions: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... 
+ if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -19009,10 +19287,13 @@ definitions: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -19031,7 +19312,6 @@ definitions: name "y.z". - JSON ==== @@ -19104,6 +19384,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string vendorID: @@ -19119,6 +19400,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string productIDs: @@ -19142,6 +19424,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string reason: @@ -19164,7 +19447,7 @@ definitions: pub_key: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -19214,19 +19497,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -19259,6 +19540,10 @@ definitions: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... 
+ if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -19273,10 +19558,13 @@ definitions: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -19295,7 +19583,6 @@ definitions: name "y.z". - JSON ==== @@ -19365,6 +19652,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string vendorID: @@ -19380,6 +19668,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string productIDs: @@ -19407,7 +19696,7 @@ definitions: pub_key: type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -19457,19 +19746,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -19502,6 +19789,10 @@ definitions: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. 
@@ -19516,10 +19807,13 @@ definitions: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -19538,7 +19832,6 @@ definitions: name "y.z". - JSON ==== @@ -19608,6 +19901,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string vendorID: @@ -19623,6 +19917,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string productIDs: @@ -19646,6 +19941,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string reason: @@ -19696,9 +19992,7 @@ definitions: height: type: string format: int64 - description: |- - The height at which the upgrade must be performed. - Only used if Time is not set. + description: The height at which the upgrade must be performed. info: type: string title: |- @@ -19714,7 +20008,7 @@ definitions: If this field is not empty, an error will be thrown. type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of the @@ -19760,18 +20054,16 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely used type + server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above specified - type. + additionalProperties: {} description: >- Plan specifies information about a planned upgrade and when it should occur. 
@@ -19816,9 +20108,7 @@ definitions: height: type: string format: int64 - description: |- - The height at which the upgrade must be performed. - Only used if Time is not set. + description: The height at which the upgrade must be performed. info: type: string title: >- @@ -19836,7 +20126,7 @@ definitions: If this field is not empty, an error will be thrown. type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of the @@ -19884,19 +20174,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely used + type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- Plan specifies information about a planned upgrade and when it should occur. @@ -19912,6 +20200,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string rejects: @@ -19924,6 +20213,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string zigbeealliance.distributedcomplianceledger.dclupgrade.Grant: @@ -19934,6 +20224,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string zigbeealliance.distributedcomplianceledger.dclupgrade.ProposedUpgrade: @@ -19977,9 +20268,7 @@ definitions: height: type: string format: int64 - description: |- - The height at which the upgrade must be performed. - Only used if Time is not set. + description: The height at which the upgrade must be performed. 
info: type: string title: >- @@ -19997,7 +20286,7 @@ definitions: If this field is not empty, an error will be thrown. type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of the @@ -20045,19 +20334,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely used + type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- Plan specifies information about a planned upgrade and when it should occur. @@ -20073,6 +20360,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string rejects: @@ -20085,6 +20373,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string zigbeealliance.distributedcomplianceledger.dclupgrade.QueryAllApprovedUpgradeResponse: @@ -20133,9 +20422,7 @@ definitions: height: type: string format: int64 - description: |- - The height at which the upgrade must be performed. - Only used if Time is not set. + description: The height at which the upgrade must be performed. info: type: string title: >- @@ -20154,7 +20441,7 @@ definitions: If this field is not empty, an error will be thrown. type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -20205,19 +20492,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. 
As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- Plan specifies information about a planned upgrade and when it should occur. @@ -20233,6 +20518,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string rejects: @@ -20245,6 +20531,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string pagination: @@ -20253,9 +20540,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -20318,9 +20606,7 @@ definitions: height: type: string format: int64 - description: |- - The height at which the upgrade must be performed. - Only used if Time is not set. + description: The height at which the upgrade must be performed. info: type: string title: >- @@ -20339,7 +20625,7 @@ definitions: If this field is not empty, an error will be thrown. type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -20390,19 +20676,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. 
- value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- Plan specifies information about a planned upgrade and when it should occur. @@ -20418,6 +20702,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string rejects: @@ -20430,6 +20715,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string pagination: @@ -20438,9 +20724,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -20503,9 +20790,7 @@ definitions: height: type: string format: int64 - description: |- - The height at which the upgrade must be performed. - Only used if Time is not set. + description: The height at which the upgrade must be performed. info: type: string title: >- @@ -20524,7 +20809,7 @@ definitions: If this field is not empty, an error will be thrown. type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -20575,19 +20860,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. 
+ additionalProperties: {} description: >- Plan specifies information about a planned upgrade and when it should occur. @@ -20603,6 +20886,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string rejects: @@ -20615,6 +20899,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string pagination: @@ -20623,9 +20908,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -20686,9 +20972,7 @@ definitions: height: type: string format: int64 - description: |- - The height at which the upgrade must be performed. - Only used if Time is not set. + description: The height at which the upgrade must be performed. info: type: string title: >- @@ -20706,7 +20990,7 @@ definitions: If this field is not empty, an error will be thrown. type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -20756,19 +21040,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- Plan specifies information about a planned upgrade and when it should occur. 
@@ -20784,6 +21066,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string rejects: @@ -20796,6 +21079,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string zigbeealliance.distributedcomplianceledger.dclupgrade.QueryGetProposedUpgradeResponse: @@ -20842,9 +21126,7 @@ definitions: height: type: string format: int64 - description: |- - The height at which the upgrade must be performed. - Only used if Time is not set. + description: The height at which the upgrade must be performed. info: type: string title: >- @@ -20862,7 +21144,7 @@ definitions: If this field is not empty, an error will be thrown. type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -20912,19 +21194,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- Plan specifies information about a planned upgrade and when it should occur. 
@@ -20940,6 +21220,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string rejects: @@ -20952,6 +21233,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string zigbeealliance.distributedcomplianceledger.dclupgrade.QueryGetRejectedUpgradeResponse: @@ -20998,9 +21280,7 @@ definitions: height: type: string format: int64 - description: |- - The height at which the upgrade must be performed. - Only used if Time is not set. + description: The height at which the upgrade must be performed. info: type: string title: >- @@ -21018,7 +21298,7 @@ definitions: If this field is not empty, an error will be thrown. type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of @@ -21068,19 +21348,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- Plan specifies information about a planned upgrade and when it should occur. 
@@ -21096,6 +21374,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string rejects: @@ -21108,6 +21387,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string zigbeealliance.distributedcomplianceledger.dclupgrade.RejectedUpgrade: @@ -21151,9 +21431,7 @@ definitions: height: type: string format: int64 - description: |- - The height at which the upgrade must be performed. - Only used if Time is not set. + description: The height at which the upgrade must be performed. info: type: string title: >- @@ -21171,7 +21449,7 @@ definitions: If this field is not empty, an error will be thrown. type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of the @@ -21219,19 +21497,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely used + type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- Plan specifies information about a planned upgrade and when it should occur. 
@@ -21247,6 +21523,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string rejects: @@ -21259,6 +21536,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string zigbeealliance.distributedcomplianceledger.model.Model: @@ -21442,9 +21720,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -21660,6 +21939,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string subjectAsText: @@ -21674,6 +21954,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string vid: @@ -21741,6 +22022,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string subjectAsText: @@ -21755,6 +22037,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string vid: @@ -21796,6 +22079,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string zigbeealliance.distributedcomplianceledger.pki.NocIcaCertificates: @@ -21839,6 +22123,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string subjectAsText: @@ -21853,6 +22138,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string vid: @@ -21904,6 +22190,7 @@ definitions: time: type: string format: int64 + title: number of 
nanoseconds elapsed since January 1, 1970 UTC info: type: string subjectAsText: @@ -21918,6 +22205,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string vid: @@ -22098,6 +22386,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string subjectAsText: @@ -22112,6 +22401,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string vid: @@ -22140,6 +22430,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string subjectAsText: @@ -22198,6 +22489,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string subjectAsText: @@ -22212,6 +22506,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string vid: @@ -22231,9 +22528,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -22296,6 +22594,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string subjectAsText: @@ -22310,6 +22611,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string vid: @@ -22326,9 +22630,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. 
It will be empty if + there are no more results. total: type: string format: uint64 @@ -22391,6 +22696,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string subjectAsText: @@ -22405,6 +22713,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string vid: @@ -22421,9 +22732,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -22486,9 +22798,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -22533,6 +22846,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string subjectAsText: @@ -22547,6 +22861,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string vid: @@ -22564,9 +22879,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. 
total: type: string format: uint64 @@ -22605,6 +22921,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string subjectAsText: @@ -22622,9 +22939,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -22688,6 +23006,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string subjectAsText: @@ -22702,6 +23023,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string vid: @@ -22721,9 +23045,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -22787,6 +23112,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string subjectAsText: @@ -22801,6 +23129,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string vid: @@ -22820,9 +23151,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. 
total: type: string format: uint64 @@ -22886,6 +23218,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string subjectAsText: @@ -22900,6 +23235,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string vid: @@ -22916,9 +23254,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -22992,6 +23331,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string subjectAsText: @@ -23006,6 +23348,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string vid: @@ -23097,6 +23442,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string subjectAsText: @@ -23111,6 +23459,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string vid: @@ -23238,6 +23589,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string subjectAsText: @@ -23252,6 +23606,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string vid: @@ -23371,6 +23728,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string subjectAsText: @@ -23385,6 +23743,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds 
elapsed since January 1, 1970 UTC info: type: string vid: @@ -23416,6 +23775,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string subjectAsText: @@ -23472,6 +23832,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string subjectAsText: @@ -23486,6 +23849,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string vid: @@ -23544,6 +23910,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string subjectAsText: @@ -23558,6 +23927,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string vid: @@ -23616,6 +23988,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string subjectAsText: @@ -23630,6 +24005,9 @@ definitions: time: type: string format: int64 + title: >- + number of nanoseconds elapsed since January 1, 1970 + UTC info: type: string vid: @@ -23697,6 +24075,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string subjectAsText: @@ -23711,6 +24090,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string vid: @@ -23766,6 +24146,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string subjectAsText: @@ -23780,6 +24161,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string vid: @@ -23835,6 +24217,7 @@ definitions: time: type: string format: int64 + title: number of 
nanoseconds elapsed since January 1, 1970 UTC info: type: string subjectAsText: @@ -23849,6 +24232,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string vid: @@ -23903,6 +24287,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string disabledByNodeAdmin: @@ -23917,6 +24302,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string zigbeealliance.distributedcomplianceledger.validator.Grant: @@ -23927,6 +24313,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string zigbeealliance.distributedcomplianceledger.validator.LastValidatorPower: @@ -23954,6 +24341,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string rejects: @@ -23966,6 +24354,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string zigbeealliance.distributedcomplianceledger.validator.QueryAllDisabledValidatorResponse: @@ -23990,6 +24379,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string disabledByNodeAdmin: @@ -24004,6 +24394,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string pagination: @@ -24012,9 +24403,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. 
total: type: string format: uint64 @@ -24050,9 +24442,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -24091,6 +24484,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string rejects: @@ -24103,6 +24497,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string pagination: @@ -24111,9 +24506,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -24152,6 +24548,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string rejects: @@ -24164,6 +24561,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string pagination: @@ -24172,9 +24570,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 @@ -24219,9 +24618,10 @@ definitions: type: string description: optional details. 
pubKey: + title: the consensus public key of the tendermint validator type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of the @@ -24271,19 +24671,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely + used type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -24316,6 +24714,10 @@ definitions: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -24330,10 +24732,13 @@ definitions: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -24351,7 +24756,6 @@ definitions: name "y.z". - JSON ==== @@ -24387,7 +24791,6 @@ definitions: "@type": "type.googleapis.com/google.protobuf.Duration", "value": "1.212s" } - title: the consensus public key of the tendermint validator power: type: integer format: int32 @@ -24404,9 +24807,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. 
total: type: string format: uint64 @@ -24443,6 +24847,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string disabledByNodeAdmin: @@ -24457,6 +24862,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string zigbeealliance.distributedcomplianceledger.validator.QueryGetLastValidatorPowerResponse: @@ -24490,6 +24896,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string rejects: @@ -24502,6 +24909,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string zigbeealliance.distributedcomplianceledger.validator.QueryGetRejectedDisableValidatorResponse: @@ -24524,6 +24932,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string rejects: @@ -24536,6 +24945,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string zigbeealliance.distributedcomplianceledger.validator.QueryGetValidatorResponse: @@ -24564,9 +24974,10 @@ definitions: type: string description: optional details. pubKey: + title: the consensus public key of the tendermint validator type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of the @@ -24614,19 +25025,17 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. As of May 2023, there are no widely used + type server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. 
- value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above - specified type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -24659,6 +25068,10 @@ definitions: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -24673,10 +25086,13 @@ definitions: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -24692,7 +25108,6 @@ definitions: name "y.z". - JSON ==== @@ -24728,7 +25143,6 @@ definitions: "@type": "type.googleapis.com/google.protobuf.Duration", "value": "1.212s" } - title: the consensus public key of the tendermint validator power: type: integer format: int32 @@ -24756,6 +25170,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string rejects: @@ -24768,6 +25183,7 @@ definitions: time: type: string format: int64 + title: number of nanoseconds elapsed since January 1, 1970 UTC info: type: string zigbeealliance.distributedcomplianceledger.validator.Validator: @@ -24793,9 +25209,10 @@ definitions: type: string description: optional details. pubKey: + title: the consensus public key of the tendermint validator type: object properties: - type_url: + '@type': type: string description: >- A URL/resource name that uniquely identifies the type of the @@ -24841,18 +25258,16 @@ definitions: protobuf release, and it is not used for type URLs beginning with - type.googleapis.com. + type.googleapis.com. 
As of May 2023, there are no widely used type + server + + implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the above specified - type. + additionalProperties: {} description: >- `Any` contains an arbitrary serialized protocol buffer message along with a @@ -24884,6 +25299,10 @@ definitions: if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } Example 3: Pack and unpack a message in Python. @@ -24898,10 +25317,13 @@ definitions: Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} - any, err := ptypes.MarshalAny(foo) + any, err := anypb.New(foo) + if err != nil { + ... + } ... foo := &pb.Foo{} - if err := ptypes.UnmarshalAny(any, foo); err != nil { + if err := any.UnmarshalTo(foo); err != nil { ... } @@ -24916,7 +25338,6 @@ definitions: name "y.z". - JSON ==== @@ -24951,7 +25372,6 @@ definitions: "@type": "type.googleapis.com/google.protobuf.Duration", "value": "1.212s" } - title: the consensus public key of the tendermint validator power: type: integer format: int32 @@ -24992,9 +25412,10 @@ definitions: next_key: type: string format: byte - title: |- + description: |- next_key is the key to be passed to PageRequest.key to - query the next page most efficiently + query the next page most efficiently. It will be empty if + there are no more results. total: type: string format: uint64 diff --git a/docs/transactions.md b/docs/transactions.md index 88aaabd6b..d808f8387 100644 --- a/docs/transactions.md +++ b/docs/transactions.md @@ -49,7 +49,8 @@ an Account or sign the request. - Build transaction by CLI 1: `dcld tx ... --generate-only` - Fetch `account number` and `sequence` by CLI 1: `dcld query auth account --address
` - Sign transaction by CLI 2: `dcld tx sign txn.json --from --account-number --sequence --gas "auto" --offline --output-document txn.json` - - Broadcat transaction by CLI 1: `dcld tx broadcast txn.json` + - Broadcast transaction by CLI 1: `dcld tx broadcast txn.json` + - To get the actual result of a transaction, `dcld query tx=txHash` call must be executed, where `txHash` is the hash of the previously executed transaction. - gRPC: - Generate a client code from the proto files [proto](../proto) for the client language (see ) - Build, sign, and broadcast the message (transaction). @@ -96,7 +97,7 @@ Please make sure that TLS is enabled in gRPC, REST or Light Client Proxy for sec - Tendermint RPC supports state proofs. Tendermint's Light Client library can be used to verify the state proofs. So, if Light Client API is used, then it's possible to communicate with non-trusted nodes. - Please note, that multi-value queries don't have state proofs support and should be sent to trusted nodes only. - - Refer to [this doc](./tendermint-rpc.md) to see how to [subscribe](./tendermint-rpc.md#subscribe) to a Tendermint WebSocket based events and/or [query](./tendermint-rpc.md#querying-application-components) an application components. + - Refer to [this doc](./cometbft-rpc.md) to see how to [subscribe](./cometbft-rpc.md#subscribe) to Tendermint WebSocket-based events and/or [query](./cometbft-rpc.md#querying-application-components) application components. `NotFound` (404 code) is returned if an entry is not found on the ledger. 
diff --git a/genlocalnetappbins.sh b/genlocalnetappbins.sh index 93c69610a..ef2e9cb29 100755 --- a/genlocalnetappbins.sh +++ b/genlocalnetappbins.sh @@ -11,7 +11,7 @@ docker container create --name ${IMAGE_TAG}-inst ${IMAGE_TAG} if [ -n "$MAINNET_STABLE_VERSION" ]; then wget "https://github.com/zigbee-alliance/distributed-compliance-ledger/releases/download/${MAINNET_STABLE_VERSION}/dcld" - chmod ugo+x dcld + chmod ugo+x dcld fi for node_name in node0 node1 node2 node3 observer0 lightclient0; do diff --git a/genlocalnetconfig.sh b/genlocalnetconfig.sh index 50d103ce0..26fa23ba2 100755 --- a/genlocalnetconfig.sh +++ b/genlocalnetconfig.sh @@ -53,7 +53,7 @@ $DCL_BINARY config chain-id "$CHAIN_ID" $DCL_BINARY config output json $DCL_BINARY config node "tcp://localhost:26657" $DCL_BINARY config keyring-backend test -$DCL_BINARY config broadcast-mode block +$DCL_BINARY config broadcast-mode sync (echo "$KEYPASSWD"; echo "$KEYPASSWD") | $DCL_BINARY keys add jack (echo "$KEYPASSWD"; echo "$KEYPASSWD") | $DCL_BINARY keys add alice @@ -173,6 +173,8 @@ for node_name in node0 node1 node2 node3 observer0; do if [[ -d "$LOCALNET_DIR/${node_name}" ]]; then # Make RPC endpoints available externally sed -i $SED_EXT 's/laddr = "tcp:\/\/127.0.0.1:26657"/laddr = "tcp:\/\/0.0.0.0:26657"/g' "$LOCALNET_DIR/${node_name}/config/config.toml" + # Make REST endpoints available externally + sed -i $SED_EXT 's/address = "tcp:\/\/localhost:1317"/address = "tcp:\/\/0.0.0.0:1317"/g' "$LOCALNET_DIR/${node_name}/config/app.toml" # Make REST endpoints available externally sed -i $SED_EXT 's/address = "tcp:\/\/localhost:1317"/address = "tcp:\/\/0.0.0.0:1317"/g' "$LOCALNET_DIR/${node_name}/config/app.toml" @@ -202,9 +204,9 @@ function init_light_client_proxy { $DCL_BINARY config chain-id "$CHAIN_ID" $DCL_BINARY config node "tcp://localhost:26657" - $DCL_BINARY config broadcast-mode block + $DCL_BINARY config broadcast-mode sync $DCL_BINARY config keyring-backend test - $DCL_BINARY config 
broadcast-mode block + $DCL_BINARY config broadcast-mode sync cp -R "$LOCALNET_DIR"/client/* "$DCL_DIR" @@ -216,6 +218,7 @@ if [[ -n "$DCL_LIGHT_CLIENT_PROXY" ]]; then init_light_client_proxy lightclient0 fi +chmod 777 -R $DCL_DIR #if [ -n "$MAINNET_STABLE_VERSION" ]; then # rm dcld #fi \ No newline at end of file diff --git a/go.mod b/go.mod index ab0ed06a8..67be9cc33 100644 --- a/go.mod +++ b/go.mod @@ -1,38 +1,226 @@ module github.com/zigbee-alliance/distributed-compliance-ledger -go 1.16 +go 1.20 require ( - github.com/cosmos/cosmos-proto v0.0.0-20210914142853-23ed61ac79ce - github.com/cosmos/cosmos-sdk v0.44.5 + cosmossdk.io/api v0.3.1 + cosmossdk.io/core v0.5.1 + cosmossdk.io/errors v1.0.1 + cosmossdk.io/math v1.2.0 + github.com/bufbuild/buf v1.7.0 + github.com/cometbft/cometbft v0.37.4 + github.com/cometbft/cometbft-db v0.8.0 + github.com/cosmos/cosmos-proto v1.0.0-beta.2 + github.com/cosmos/cosmos-sdk v0.47.8 github.com/cosmos/go-bip39 v1.0.0 - github.com/go-playground/locales v0.14.0 - github.com/go-playground/universal-translator v0.18.0 - github.com/go-playground/validator/v10 v10.9.0 - github.com/gogo/protobuf v1.3.3 + github.com/cosmos/gogoproto v1.4.10 + github.com/go-playground/locales v0.14.1 + github.com/go-playground/universal-translator v0.18.1 + github.com/go-playground/validator/v10 v10.11.2 github.com/golang/protobuf v1.5.3 github.com/gorilla/mux v1.8.0 github.com/grpc-ecosystem/grpc-gateway v1.16.0 - github.com/kr/pretty v0.3.1 // indirect + github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.0 github.com/pkg/errors v0.9.1 - github.com/spf13/cast v1.3.1 - github.com/spf13/cobra v1.2.1 + github.com/spf13/cast v1.5.0 + github.com/spf13/cobra v1.6.1 github.com/spf13/pflag v1.0.5 - github.com/spf13/viper v1.8.1 - github.com/stretchr/testify v1.8.2 - github.com/tendermint/spm v0.1.9 - github.com/tendermint/tendermint v0.34.14 - github.com/tendermint/tm-db v0.6.4 - golang.org/x/net v0.12.0 // indirect - google.golang.org/genproto/googleapis/api 
v0.0.0-20230724170836-66ad5b6ff146 - google.golang.org/genproto/googleapis/rpc v0.0.0-20230724170836-66ad5b6ff146 // indirect - google.golang.org/grpc v1.56.2 + github.com/spf13/viper v1.14.0 + github.com/stretchr/testify v1.8.4 + google.golang.org/genproto/googleapis/api v0.0.0-20240102182953-50ed04b92917 + google.golang.org/grpc v1.60.1 + google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0 + google.golang.org/protobuf v1.32.0 sigs.k8s.io/yaml v1.3.0 ) +require ( + cloud.google.com/go v0.111.0 // indirect + cloud.google.com/go/compute v1.23.3 // indirect + cloud.google.com/go/compute/metadata v0.2.3 // indirect + cloud.google.com/go/iam v1.1.5 // indirect + cloud.google.com/go/storage v1.30.1 // indirect + cosmossdk.io/depinject v1.0.0-alpha.4 // indirect + cosmossdk.io/log v1.3.0 // indirect + cosmossdk.io/tools/rosetta v0.2.1 // indirect + filippo.io/edwards25519 v1.0.0 // indirect + github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect + github.com/99designs/keyring v1.2.1 // indirect + github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect + github.com/ChainSafe/go-schnorrkel v1.0.0 // indirect + github.com/Microsoft/go-winio v0.6.0 // indirect + github.com/armon/go-metrics v0.4.1 // indirect + github.com/aws/aws-sdk-go v1.44.203 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect + github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816 // indirect + github.com/btcsuite/btcd/btcec/v2 v2.3.2 // indirect + github.com/bufbuild/connect-go v1.0.0 // indirect + github.com/bufbuild/protocompile v0.4.0 // indirect + github.com/cenkalti/backoff/v4 v4.1.3 // indirect + github.com/cespare/xxhash v1.1.0 // indirect + github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/chzyer/readline v1.5.1 // indirect + github.com/cockroachdb/errors v1.10.0 // indirect + github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b // 
indirect + github.com/cockroachdb/redact v1.1.5 // indirect + github.com/coinbase/rosetta-sdk-go/types v1.0.0 // indirect + github.com/confio/ics23/go v0.9.0 // indirect + github.com/containerd/containerd v1.6.8 // indirect + github.com/containerd/typeurl v1.0.2 // indirect + github.com/cosmos/btcutil v1.0.5 // indirect + github.com/cosmos/gogogateway v1.2.0 // indirect + github.com/cosmos/iavl v0.20.1 // indirect + github.com/cosmos/ledger-cosmos-go v0.12.4 // indirect + github.com/cosmos/rosetta-sdk-go v0.10.0 // indirect + github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect + github.com/creachadair/taskgroup v0.3.2 // indirect + github.com/danieljoos/wincred v1.1.2 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0 // indirect + github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f // indirect + github.com/dgraph-io/badger/v2 v2.2007.4 // indirect + github.com/dgraph-io/ristretto v0.1.1 // indirect + github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 // indirect + github.com/docker/distribution v2.8.1+incompatible // indirect + github.com/docker/docker v20.10.19+incompatible // indirect + github.com/docker/go-connections v0.4.0 // indirect + github.com/docker/go-units v0.5.0 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/dvsekhvalnov/jose2go v1.5.0 // indirect + github.com/felixge/httpsnoop v1.0.2 // indirect + github.com/fsnotify/fsnotify v1.6.0 // indirect + github.com/getsentry/sentry-go v0.23.0 // indirect + github.com/ghodss/yaml v1.0.0 // indirect + github.com/go-chi/chi/v5 v5.0.7 // indirect + github.com/go-kit/kit v0.12.0 // indirect + github.com/go-kit/log v0.2.1 // indirect + github.com/go-logfmt/logfmt v0.5.1 // indirect + github.com/go-logr/logr v1.2.4 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 // indirect + github.com/gofrs/flock v0.8.1 // indirect + 
github.com/gofrs/uuid v4.3.0+incompatible // indirect + github.com/gogo/googleapis v1.4.1 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang/glog v1.1.2 // indirect + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect + github.com/golang/mock v1.6.0 // indirect + github.com/golang/snappy v0.0.4 // indirect + github.com/google/btree v1.1.2 // indirect + github.com/google/go-cmp v0.6.0 // indirect + github.com/google/orderedcode v0.0.1 // indirect + github.com/google/s2a-go v0.1.7 // indirect + github.com/google/uuid v1.4.0 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect + github.com/googleapis/gax-go/v2 v2.12.0 // indirect + github.com/gorilla/handlers v1.5.1 // indirect + github.com/gorilla/websocket v1.5.0 // indirect + github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 // indirect + github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect + github.com/gtank/merlin v0.1.1 // indirect + github.com/gtank/ristretto255 v0.1.2 // indirect + github.com/hashicorp/go-cleanhttp v0.5.2 // indirect + github.com/hashicorp/go-getter v1.7.1 // indirect + github.com/hashicorp/go-immutable-radix v1.3.1 // indirect + github.com/hashicorp/go-safetemp v1.0.0 // indirect + github.com/hashicorp/go-version v1.6.0 // indirect + github.com/hashicorp/golang-lru v0.5.5-0.20210104140557-80c98217689d // indirect + github.com/hashicorp/hcl v1.0.0 // indirect + github.com/hdevalence/ed25519consensus v0.1.0 // indirect + github.com/huandu/skiplist v1.2.0 // indirect + github.com/improbable-eng/grpc-web v0.15.0 // indirect + github.com/inconshreveable/mousetrap v1.0.1 // indirect + github.com/jdxcode/netrc v0.0.0-20210204082910-926c7f70242a // indirect + github.com/jhump/protocompile v0.0.0-20220216033700-d705409f108f // indirect + github.com/jhump/protoreflect v1.15.1 // indirect + github.com/jmespath/go-jmespath v0.4.0 // indirect + github.com/jmhodges/levigo v1.0.0 // indirect + 
github.com/klauspost/compress v1.16.7 // indirect + github.com/klauspost/pgzip v1.2.5 // indirect + github.com/kr/pretty v0.3.1 // indirect + github.com/kr/text v0.2.0 // indirect + github.com/leodido/go-urn v1.2.1 // indirect + github.com/lib/pq v1.10.7 // indirect + github.com/libp2p/go-buffer-pool v0.1.0 // indirect + github.com/linxGnu/grocksdb v1.7.16 // indirect + github.com/magiconair/properties v1.8.6 // indirect + github.com/manifoldco/promptui v0.9.0 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect + github.com/mimoo/StrobeGo v0.0.0-20210601165009-122bf33a46e0 // indirect + github.com/minio/highwayhash v1.0.2 // indirect + github.com/mitchellh/go-homedir v1.1.0 // indirect + github.com/mitchellh/go-testing-interface v1.14.1 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/moby/buildkit v0.10.4 // indirect + github.com/moby/term v0.0.0-20220808134915-39b0c02b01ae // indirect + github.com/morikuni/aec v1.0.0 // indirect + github.com/mtibben/percent v0.2.1 // indirect + github.com/opencontainers/go-digest v1.0.0 // indirect + github.com/opencontainers/image-spec v1.1.0-rc2 // indirect + github.com/pelletier/go-toml v1.9.5 // indirect + github.com/pelletier/go-toml/v2 v2.0.7 // indirect + github.com/petermattis/goid v0.0.0-20230317030725-371a4b8eda08 // indirect + github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 // indirect + github.com/pkg/profile v1.6.0 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/prometheus/client_golang v1.14.0 // indirect + github.com/prometheus/client_model v0.3.0 // indirect + github.com/prometheus/common v0.42.0 // indirect + github.com/prometheus/procfs v0.9.0 // indirect + github.com/rakyll/statik v0.1.7 // indirect + github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect + github.com/rogpeppe/go-internal 
v1.11.0 // indirect + github.com/rs/cors v1.8.2 // indirect + github.com/rs/zerolog v1.31.0 // indirect + github.com/russross/blackfriday/v2 v2.1.0 // indirect + github.com/sasha-s/go-deadlock v0.3.1 // indirect + github.com/sirupsen/logrus v1.9.0 // indirect + github.com/spf13/afero v1.9.2 // indirect + github.com/spf13/jwalterweatherman v1.1.0 // indirect + github.com/stretchr/objx v0.5.0 // indirect + github.com/subosito/gotenv v1.4.1 // indirect + github.com/syndtr/goleveldb v1.0.1-0.20220721030215-126854af5e6d // indirect + github.com/tendermint/go-amino v0.16.0 // indirect + github.com/tidwall/btree v1.6.0 // indirect + github.com/ulikunitz/xz v0.5.11 // indirect + github.com/zondax/hid v0.9.2 // indirect + github.com/zondax/ledger-go v0.14.3 // indirect + go.etcd.io/bbolt v1.3.7 // indirect + go.opencensus.io v0.24.0 // indirect + go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.36.3 // indirect + go.opentelemetry.io/otel v1.19.0 // indirect + go.opentelemetry.io/otel/metric v1.19.0 // indirect + go.opentelemetry.io/otel/trace v1.19.0 // indirect + go.uber.org/atomic v1.10.0 // indirect + go.uber.org/multierr v1.8.0 // indirect + go.uber.org/zap v1.23.0 // indirect + golang.org/x/crypto v0.16.0 // indirect + golang.org/x/exp v0.0.0-20230711153332-06a737ee72cb // indirect + golang.org/x/mod v0.11.0 // indirect + golang.org/x/net v0.19.0 // indirect + golang.org/x/oauth2 v0.15.0 // indirect + golang.org/x/sync v0.4.0 // indirect + golang.org/x/sys v0.16.0 // indirect + golang.org/x/term v0.15.0 // indirect + golang.org/x/text v0.14.0 // indirect + golang.org/x/tools v0.6.0 // indirect + google.golang.org/api v0.149.0 // indirect + google.golang.org/appengine v1.6.8 // indirect + google.golang.org/genproto v0.0.0-20240102182953-50ed04b92917 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20240108191215-35c7eff3a6b1 // indirect + gopkg.in/ini.v1 v1.67.0 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + 
gopkg.in/yaml.v3 v3.0.1 // indirect + nhooyr.io/websocket v1.8.6 // indirect + pgregory.net/rapid v0.5.5 // indirect +) + replace ( - github.com/99designs/keyring => github.com/cosmos/keyring v1.1.7-0.20210622111912-ef00f8ac3d76 - github.com/gogo/protobuf => github.com/regen-network/protobuf v1.3.3-alpha.regen.1 - github.com/tendermint/tendermint => github.com/zigbee-alliance/tendermint v0.34.140 - google.golang.org/grpc => google.golang.org/grpc v1.33.2 + github.com/99designs/keyring => github.com/cosmos/keyring v1.2.0 + github.com/cometbft/cometbft => github.com/zigbee-alliance/cometbft v0.37.5 + github.com/gin-gonic/gin => github.com/gin-gonic/gin v1.9.0 + github.com/syndtr/goleveldb => github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 ) diff --git a/go.sum b/go.sum index 1fca563f1..a52944a9c 100644 --- a/go.sum +++ b/go.sum @@ -1,5 +1,4 @@ -4d63.com/gochecknoglobals v0.0.0-20201008074935-acfc0b28355a/go.mod h1:wfdC5ZjKSPr7CybKEcgJhUOgeAQW1+7WcyK8OvUilfo= -bitbucket.org/creachadair/shell v0.0.6/go.mod h1:8Qqi/cYk7vPnsOePHroKXDJYmb5x7ENhtiFtfZq8K+M= +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= @@ -13,7 +12,6 @@ cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6 cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= -cloud.google.com/go v0.60.0/go.mod h1:yw2G51M9IfRboUH61Us8GqCeF1PzPblB823Mn2q2eAU= cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= cloud.google.com/go v0.65.0/go.mod 
h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= @@ -30,107 +28,28 @@ cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+Y cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4= cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= -cloud.google.com/go v0.100.1/go.mod h1:fs4QogzfH5n2pBXBP9vRiU+eCny7lD2vmFZy79Iuw1U= cloud.google.com/go v0.100.2/go.mod h1:4Xra9TjzAeYHrl5+oeLlzbM2k3mjVhZh4UqTZ//w99A= cloud.google.com/go v0.102.0/go.mod h1:oWcCzKlqJ5zgHQt9YsaeTY9KzIvjyy0ArmiBUgpQ+nc= cloud.google.com/go v0.102.1/go.mod h1:XZ77E9qnTEnrgEOvr4xzfdX5TRo7fB4T2F4O6+34hIU= cloud.google.com/go v0.104.0/go.mod h1:OO6xxXdJyvuJPcEPBLN9BJPD+jep5G1+2U5B5gkRYtA= -cloud.google.com/go v0.105.0/go.mod h1:PrLgOJNe5nfE9UMxKxgXj4mD3voiP+YQ6gdt6KMFOKM= -cloud.google.com/go v0.107.0/go.mod h1:wpc2eNrD7hXUTy8EKS10jkxpZBjASrORK7goS+3YX2I= -cloud.google.com/go v0.110.0/go.mod h1:SJnCLqQ0FCFGSZMUNUf84MV3Aia54kn7pi8st7tMzaY= -cloud.google.com/go v0.110.2/go.mod h1:k04UEeEtb6ZBRTv3dZz4CeJC3jKGxyhl0sAiVVquxiw= -cloud.google.com/go v0.110.4/go.mod h1:+EYjdK8e5RME/VY/qLCAtuyALQ9q67dvuum8i+H5xsI= -cloud.google.com/go/accessapproval v1.4.0/go.mod h1:zybIuC3KpDOvotz59lFe5qxRZx6C75OtwbisN56xYB4= -cloud.google.com/go/accessapproval v1.5.0/go.mod h1:HFy3tuiGvMdcd/u+Cu5b9NkO1pEICJ46IR82PoUdplw= -cloud.google.com/go/accessapproval v1.6.0/go.mod h1:R0EiYnwV5fsRFiKZkPHr6mwyk2wxUJ30nL4j2pcFY2E= -cloud.google.com/go/accessapproval v1.7.1/go.mod h1:JYczztsHRMK7NTXb6Xw+dwbs/WnOJxbo/2mTI+Kgg68= -cloud.google.com/go/accesscontextmanager v1.3.0/go.mod h1:TgCBehyr5gNMz7ZaH9xubp+CE8dkrszb4oK9CWyvD4o= -cloud.google.com/go/accesscontextmanager v1.4.0/go.mod h1:/Kjh7BBu/Gh83sv+K60vN9QE5NJcd80sU33vIe2IFPE= -cloud.google.com/go/accesscontextmanager v1.6.0/go.mod 
h1:8XCvZWfYw3K/ji0iVnp+6pu7huxoQTLmxAbVjbloTtM= -cloud.google.com/go/accesscontextmanager v1.7.0/go.mod h1:CEGLewx8dwa33aDAZQujl7Dx+uYhS0eay198wB/VumQ= -cloud.google.com/go/accesscontextmanager v1.8.0/go.mod h1:uI+AI/r1oyWK99NN8cQ3UK76AMelMzgZCvJfsi2c+ps= -cloud.google.com/go/accesscontextmanager v1.8.1/go.mod h1:JFJHfvuaTC+++1iL1coPiG1eu5D24db2wXCDWDjIrxo= +cloud.google.com/go v0.111.0 h1:YHLKNupSD1KqjDbQ3+LVdQ81h/UJbJyZG203cEfnQgM= +cloud.google.com/go v0.111.0/go.mod h1:0mibmpKP1TyOOFYQY5izo0LnT+ecvOQ0Sg3OdmMiNRU= cloud.google.com/go/aiplatform v1.22.0/go.mod h1:ig5Nct50bZlzV6NvKaTwmplLLddFx0YReh9WfTO5jKw= cloud.google.com/go/aiplatform v1.24.0/go.mod h1:67UUvRBKG6GTayHKV8DBv2RtR1t93YRu5B1P3x99mYY= -cloud.google.com/go/aiplatform v1.27.0/go.mod h1:Bvxqtl40l0WImSb04d0hXFU7gDOiq9jQmorivIiWcKg= -cloud.google.com/go/aiplatform v1.35.0/go.mod h1:7MFT/vCaOyZT/4IIFfxH4ErVg/4ku6lKv3w0+tFTgXQ= -cloud.google.com/go/aiplatform v1.36.1/go.mod h1:WTm12vJRPARNvJ+v6P52RDHCNe4AhvjcIZ/9/RRHy/k= -cloud.google.com/go/aiplatform v1.37.0/go.mod h1:IU2Cv29Lv9oCn/9LkFiiuKfwrRTq+QQMbW+hPCxJGZw= -cloud.google.com/go/aiplatform v1.45.0/go.mod h1:Iu2Q7sC7QGhXUeOhAj/oCK9a+ULz1O4AotZiqjQ8MYA= cloud.google.com/go/analytics v0.11.0/go.mod h1:DjEWCu41bVbYcKyvlws9Er60YE4a//bK6mnhWvQeFNI= cloud.google.com/go/analytics v0.12.0/go.mod h1:gkfj9h6XRf9+TS4bmuhPEShsh3hH8PAZzm/41OOhQd4= -cloud.google.com/go/analytics v0.17.0/go.mod h1:WXFa3WSym4IZ+JiKmavYdJwGG/CvpqiqczmL59bTD9M= -cloud.google.com/go/analytics v0.18.0/go.mod h1:ZkeHGQlcIPkw0R/GW+boWHhCOR43xz9RN/jn7WcqfIE= -cloud.google.com/go/analytics v0.19.0/go.mod h1:k8liqf5/HCnOUkbawNtrWWc+UAzyDlW89doe8TtoDsE= -cloud.google.com/go/analytics v0.21.2/go.mod h1:U8dcUtmDmjrmUTnnnRnI4m6zKn/yaA5N9RlEkYFHpQo= -cloud.google.com/go/apigateway v1.3.0/go.mod h1:89Z8Bhpmxu6AmUxuVRg/ECRGReEdiP3vQtk4Z1J9rJk= -cloud.google.com/go/apigateway v1.4.0/go.mod h1:pHVY9MKGaH9PQ3pJ4YLzoj6U5FUDeDFBllIz7WmzJoc= -cloud.google.com/go/apigateway v1.5.0/go.mod 
h1:GpnZR3Q4rR7LVu5951qfXPJCHquZt02jf7xQx7kpqN8= -cloud.google.com/go/apigateway v1.6.1/go.mod h1:ufAS3wpbRjqfZrzpvLC2oh0MFlpRJm2E/ts25yyqmXA= -cloud.google.com/go/apigeeconnect v1.3.0/go.mod h1:G/AwXFAKo0gIXkPTVfZDd2qA1TxBXJ3MgMRBQkIi9jc= -cloud.google.com/go/apigeeconnect v1.4.0/go.mod h1:kV4NwOKqjvt2JYR0AoIWo2QGfoRtn/pkS3QlHp0Ni04= -cloud.google.com/go/apigeeconnect v1.5.0/go.mod h1:KFaCqvBRU6idyhSNyn3vlHXc8VMDJdRmwDF6JyFRqZ8= -cloud.google.com/go/apigeeconnect v1.6.1/go.mod h1:C4awq7x0JpLtrlQCr8AzVIzAaYgngRqWf9S5Uhg+wWs= -cloud.google.com/go/apigeeregistry v0.4.0/go.mod h1:EUG4PGcsZvxOXAdyEghIdXwAEi/4MEaoqLMLDMIwKXY= -cloud.google.com/go/apigeeregistry v0.5.0/go.mod h1:YR5+s0BVNZfVOUkMa5pAR2xGd0A473vA5M7j247o1wM= -cloud.google.com/go/apigeeregistry v0.6.0/go.mod h1:BFNzW7yQVLZ3yj0TKcwzb8n25CFBri51GVGOEUcgQsc= -cloud.google.com/go/apigeeregistry v0.7.1/go.mod h1:1XgyjZye4Mqtw7T9TsY4NW10U7BojBvG4RMD+vRDrIw= -cloud.google.com/go/apikeys v0.4.0/go.mod h1:XATS/yqZbaBK0HOssf+ALHp8jAlNHUgyfprvNcBIszU= -cloud.google.com/go/apikeys v0.5.0/go.mod h1:5aQfwY4D+ewMMWScd3hm2en3hCj+BROlyrt3ytS7KLI= -cloud.google.com/go/apikeys v0.6.0/go.mod h1:kbpXu5upyiAlGkKrJgQl8A0rKNNJ7dQ377pdroRSSi8= -cloud.google.com/go/appengine v1.4.0/go.mod h1:CS2NhuBuDXM9f+qscZ6V86m1MIIqPj3WC/UoEuR1Sno= -cloud.google.com/go/appengine v1.5.0/go.mod h1:TfasSozdkFI0zeoxW3PTBLiNqRmzraodCWatWI9Dmak= -cloud.google.com/go/appengine v1.6.0/go.mod h1:hg6i0J/BD2cKmDJbaFSYHFyZkgBEfQrDg/X0V5fJn84= -cloud.google.com/go/appengine v1.7.0/go.mod h1:eZqpbHFCqRGa2aCdope7eC0SWLV1j0neb/QnMJVWx6A= -cloud.google.com/go/appengine v1.7.1/go.mod h1:IHLToyb/3fKutRysUlFO0BPt5j7RiQ45nrzEJmKTo6E= -cloud.google.com/go/appengine v1.8.1/go.mod h1:6NJXGLVhZCN9aQ/AEDvmfzKEfoYBlfB80/BHiKVputY= cloud.google.com/go/area120 v0.5.0/go.mod h1:DE/n4mp+iqVyvxHN41Vf1CR602GiHQjFPusMFW6bGR4= cloud.google.com/go/area120 v0.6.0/go.mod h1:39yFJqWVgm0UZqWTOdqkLhjoC7uFfgXRC8g/ZegeAh0= -cloud.google.com/go/area120 v0.7.0/go.mod 
h1:a3+8EUD1SX5RUcCs3MY5YasiO1z6yLiNLRiFrykbynY= -cloud.google.com/go/area120 v0.7.1/go.mod h1:j84i4E1RboTWjKtZVWXPqvK5VHQFJRF2c1Nm69pWm9k= -cloud.google.com/go/area120 v0.8.1/go.mod h1:BVfZpGpB7KFVNxPiQBuHkX6Ed0rS51xIgmGyjrAfzsg= cloud.google.com/go/artifactregistry v1.6.0/go.mod h1:IYt0oBPSAGYj/kprzsBjZ/4LnG/zOcHyFHjWPCi6SAQ= cloud.google.com/go/artifactregistry v1.7.0/go.mod h1:mqTOFOnGZx8EtSqK/ZWcsm/4U8B77rbcLP6ruDU2Ixk= -cloud.google.com/go/artifactregistry v1.8.0/go.mod h1:w3GQXkJX8hiKN0v+at4b0qotwijQbYUqF2GWkZzAhC0= -cloud.google.com/go/artifactregistry v1.9.0/go.mod h1:2K2RqvA2CYvAeARHRkLDhMDJ3OXy26h3XW+3/Jh2uYc= -cloud.google.com/go/artifactregistry v1.11.1/go.mod h1:lLYghw+Itq9SONbCa1YWBoWs1nOucMH0pwXN1rOBZFI= -cloud.google.com/go/artifactregistry v1.11.2/go.mod h1:nLZns771ZGAwVLzTX/7Al6R9ehma4WUEhZGWV6CeQNQ= -cloud.google.com/go/artifactregistry v1.12.0/go.mod h1:o6P3MIvtzTOnmvGagO9v/rOjjA0HmhJ+/6KAXrmYDCI= -cloud.google.com/go/artifactregistry v1.13.0/go.mod h1:uy/LNfoOIivepGhooAUpL1i30Hgee3Cu0l4VTWHUC08= -cloud.google.com/go/artifactregistry v1.14.1/go.mod h1:nxVdG19jTaSTu7yA7+VbWL346r3rIdkZ142BSQqhn5E= cloud.google.com/go/asset v1.5.0/go.mod h1:5mfs8UvcM5wHhqtSv8J1CtxxaQq3AdBxxQi2jGW/K4o= cloud.google.com/go/asset v1.7.0/go.mod h1:YbENsRK4+xTiL+Ofoj5Ckf+O17kJtgp3Y3nn4uzZz5s= cloud.google.com/go/asset v1.8.0/go.mod h1:mUNGKhiqIdbr8X7KNayoYvyc4HbbFO9URsjbytpUaW0= -cloud.google.com/go/asset v1.9.0/go.mod h1:83MOE6jEJBMqFKadM9NLRcs80Gdw76qGuHn8m3h8oHQ= -cloud.google.com/go/asset v1.10.0/go.mod h1:pLz7uokL80qKhzKr4xXGvBQXnzHn5evJAEAtZiIb0wY= -cloud.google.com/go/asset v1.11.1/go.mod h1:fSwLhbRvC9p9CXQHJ3BgFeQNM4c9x10lqlrdEUYXlJo= -cloud.google.com/go/asset v1.12.0/go.mod h1:h9/sFOa4eDIyKmH6QMpm4eUK3pDojWnUhTgJlk762Hg= -cloud.google.com/go/asset v1.13.0/go.mod h1:WQAMyYek/b7NBpYq/K4KJWcRqzoalEsxz/t/dTk4THw= -cloud.google.com/go/asset v1.14.1/go.mod h1:4bEJ3dnHCqWCDbWJ/6Vn7GVI9LerSi7Rfdi03hd+WTQ= cloud.google.com/go/assuredworkloads v1.5.0/go.mod 
h1:n8HOZ6pff6re5KYfBXcFvSViQjDwxFkAkmUFffJRbbY= cloud.google.com/go/assuredworkloads v1.6.0/go.mod h1:yo2YOk37Yc89Rsd5QMVECvjaMKymF9OP+QXWlKXUkXw= cloud.google.com/go/assuredworkloads v1.7.0/go.mod h1:z/736/oNmtGAyU47reJgGN+KVoYoxeLBoj4XkKYscNI= -cloud.google.com/go/assuredworkloads v1.8.0/go.mod h1:AsX2cqyNCOvEQC8RMPnoc0yEarXQk6WEKkxYfL6kGIo= -cloud.google.com/go/assuredworkloads v1.9.0/go.mod h1:kFuI1P78bplYtT77Tb1hi0FMxM0vVpRC7VVoJC3ZoT0= -cloud.google.com/go/assuredworkloads v1.10.0/go.mod h1:kwdUQuXcedVdsIaKgKTp9t0UJkE5+PAVNhdQm4ZVq2E= -cloud.google.com/go/assuredworkloads v1.11.1/go.mod h1:+F04I52Pgn5nmPG36CWFtxmav6+7Q+c5QyJoL18Lry0= cloud.google.com/go/automl v1.5.0/go.mod h1:34EjfoFGMZ5sgJ9EoLsRtdPSNZLcfflJR39VbVNS2M0= cloud.google.com/go/automl v1.6.0/go.mod h1:ugf8a6Fx+zP0D59WLhqgTDsQI9w07o64uf/Is3Nh5p8= -cloud.google.com/go/automl v1.7.0/go.mod h1:RL9MYCCsJEOmt0Wf3z9uzG0a7adTT1fe+aObgSpkCt8= -cloud.google.com/go/automl v1.8.0/go.mod h1:xWx7G/aPEe/NP+qzYXktoBSDfjO+vnKMGgsApGJJquM= -cloud.google.com/go/automl v1.12.0/go.mod h1:tWDcHDp86aMIuHmyvjuKeeHEGq76lD7ZqfGLN6B0NuU= -cloud.google.com/go/automl v1.13.1/go.mod h1:1aowgAHWYZU27MybSCFiukPO7xnyawv7pt3zK4bheQE= -cloud.google.com/go/baremetalsolution v0.3.0/go.mod h1:XOrocE+pvK1xFfleEnShBlNAXf+j5blPPxrhjKgnIFc= -cloud.google.com/go/baremetalsolution v0.4.0/go.mod h1:BymplhAadOO/eBa7KewQ0Ppg4A4Wplbn+PsFKRLo0uI= -cloud.google.com/go/baremetalsolution v0.5.0/go.mod h1:dXGxEkmR9BMwxhzBhV0AioD0ULBmuLZI8CdwalUxuss= -cloud.google.com/go/batch v0.3.0/go.mod h1:TR18ZoAekj1GuirsUsR1ZTKN3FC/4UDnScjT8NXImFE= -cloud.google.com/go/batch v0.4.0/go.mod h1:WZkHnP43R/QCGQsZ+0JyG4i79ranE2u8xvjq/9+STPE= -cloud.google.com/go/batch v0.7.0/go.mod h1:vLZN95s6teRUqRQ4s3RLDsH8PvboqBK+rn1oevL159g= -cloud.google.com/go/beyondcorp v0.2.0/go.mod h1:TB7Bd+EEtcw9PCPQhCJtJGjk/7TC6ckmnSFS+xwTfm4= -cloud.google.com/go/beyondcorp v0.3.0/go.mod h1:E5U5lcrcXMsCuoDNyGrpyTm/hn7ne941Jz2vmksAxW8= -cloud.google.com/go/beyondcorp v0.4.0/go.mod 
h1:3ApA0mbhHx6YImmuubf5pyW8srKnCEPON32/5hj+RmM= -cloud.google.com/go/beyondcorp v0.5.0/go.mod h1:uFqj9X+dSfrheVp7ssLTaRHd2EHqSL4QZmH4e8WXGGU= -cloud.google.com/go/beyondcorp v0.6.1/go.mod h1:YhxDWw946SCbmcWo3fAhw3V4XZMSpQ/VYfcKGAEU8/4= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= @@ -138,52 +57,12 @@ cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUM cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= cloud.google.com/go/bigquery v1.42.0/go.mod h1:8dRTJxhtG+vwBKzE5OseQn/hiydoQN3EedCaOdYmxRA= -cloud.google.com/go/bigquery v1.43.0/go.mod h1:ZMQcXHsl+xmU1z36G2jNGZmKp9zNY5BUua5wDgmNCfw= -cloud.google.com/go/bigquery v1.44.0/go.mod h1:0Y33VqXTEsbamHJvJHdFmtqHvMIY28aK1+dFsvaChGc= -cloud.google.com/go/bigquery v1.47.0/go.mod h1:sA9XOgy0A8vQK9+MWhEQTY6Tix87M/ZurWFIxmF9I/E= -cloud.google.com/go/bigquery v1.48.0/go.mod h1:QAwSz+ipNgfL5jxiaK7weyOhzdoAy1zFm0Nf1fysJac= -cloud.google.com/go/bigquery v1.49.0/go.mod h1:Sv8hMmTFFYBlt/ftw2uN6dFdQPzBlREY9yBh7Oy7/4Q= -cloud.google.com/go/bigquery v1.50.0/go.mod h1:YrleYEh2pSEbgTBZYMJ5SuSr0ML3ypjRB1zgf7pvQLU= -cloud.google.com/go/bigquery v1.52.0/go.mod h1:3b/iXjRQGU4nKa87cXeg6/gogLjO8C6PmuM8i5Bi/u4= cloud.google.com/go/billing v1.4.0/go.mod h1:g9IdKBEFlItS8bTtlrZdVLWSSdSyFUZKXNS02zKMOZY= cloud.google.com/go/billing v1.5.0/go.mod h1:mztb1tBc3QekhjSgmpf/CV4LzWXLzCArwpLmP2Gm88s= -cloud.google.com/go/billing v1.6.0/go.mod h1:WoXzguj+BeHXPbKfNWkqVtDdzORazmCjraY+vrxcyvI= -cloud.google.com/go/billing v1.7.0/go.mod h1:q457N3Hbj9lYwwRbnlD7vUpyjq6u5U1RAOArInEiD5Y= -cloud.google.com/go/billing v1.12.0/go.mod h1:yKrZio/eu+okO/2McZEbch17O5CB5NpZhhXG6Z766ss= 
-cloud.google.com/go/billing v1.13.0/go.mod h1:7kB2W9Xf98hP9Sr12KfECgfGclsH3CQR0R08tnRlRbc= -cloud.google.com/go/billing v1.16.0/go.mod h1:y8vx09JSSJG02k5QxbycNRrN7FGZB6F3CAcgum7jvGA= cloud.google.com/go/binaryauthorization v1.1.0/go.mod h1:xwnoWu3Y84jbuHa0zd526MJYmtnVXn0syOjaJgy4+dM= cloud.google.com/go/binaryauthorization v1.2.0/go.mod h1:86WKkJHtRcv5ViNABtYMhhNWRrD1Vpi//uKEy7aYEfI= -cloud.google.com/go/binaryauthorization v1.3.0/go.mod h1:lRZbKgjDIIQvzYQS1p99A7/U1JqvqeZg0wiI5tp6tg0= -cloud.google.com/go/binaryauthorization v1.4.0/go.mod h1:tsSPQrBd77VLplV70GUhBf/Zm3FsKmgSqgm4UmiDItk= -cloud.google.com/go/binaryauthorization v1.5.0/go.mod h1:OSe4OU1nN/VswXKRBmciKpo9LulY41gch5c68htf3/Q= -cloud.google.com/go/binaryauthorization v1.6.1/go.mod h1:TKt4pa8xhowwffiBmbrbcxijJRZED4zrqnwZ1lKH51U= -cloud.google.com/go/certificatemanager v1.3.0/go.mod h1:n6twGDvcUBFu9uBgt4eYvvf3sQ6My8jADcOVwHmzadg= -cloud.google.com/go/certificatemanager v1.4.0/go.mod h1:vowpercVFyqs8ABSmrdV+GiFf2H/ch3KyudYQEMM590= -cloud.google.com/go/certificatemanager v1.6.0/go.mod h1:3Hh64rCKjRAX8dXgRAyOcY5vQ/fE1sh8o+Mdd6KPgY8= -cloud.google.com/go/certificatemanager v1.7.1/go.mod h1:iW8J3nG6SaRYImIa+wXQ0g8IgoofDFRp5UMzaNk1UqI= -cloud.google.com/go/channel v1.8.0/go.mod h1:W5SwCXDJsq/rg3tn3oG0LOxpAo6IMxNa09ngphpSlnk= -cloud.google.com/go/channel v1.9.0/go.mod h1:jcu05W0my9Vx4mt3/rEHpfxc9eKi9XwsdDL8yBMbKUk= -cloud.google.com/go/channel v1.11.0/go.mod h1:IdtI0uWGqhEeatSB62VOoJ8FSUhJ9/+iGkJVqp74CGE= -cloud.google.com/go/channel v1.12.0/go.mod h1:VkxCGKASi4Cq7TbXxlaBezonAYpp1GCnKMY6tnMQnLU= -cloud.google.com/go/channel v1.16.0/go.mod h1:eN/q1PFSl5gyu0dYdmxNXscY/4Fi7ABmeHCJNf/oHmc= -cloud.google.com/go/cloudbuild v1.3.0/go.mod h1:WequR4ULxlqvMsjDEEEFnOG5ZSRSgWOywXYDb1vPE6U= -cloud.google.com/go/cloudbuild v1.4.0/go.mod h1:5Qwa40LHiOXmz3386FrjrYM93rM/hdRr7b53sySrTqA= -cloud.google.com/go/cloudbuild v1.6.0/go.mod h1:UIbc/w9QCbH12xX+ezUsgblrWv+Cv4Tw83GiSMHOn9M= -cloud.google.com/go/cloudbuild v1.7.0/go.mod 
h1:zb5tWh2XI6lR9zQmsm1VRA+7OCuve5d8S+zJUul8KTg= -cloud.google.com/go/cloudbuild v1.9.0/go.mod h1:qK1d7s4QlO0VwfYn5YuClDGg2hfmLZEb4wQGAbIgL1s= -cloud.google.com/go/cloudbuild v1.10.1/go.mod h1:lyJg7v97SUIPq4RC2sGsz/9tNczhyv2AjML/ci4ulzU= -cloud.google.com/go/clouddms v1.3.0/go.mod h1:oK6XsCDdW4Ib3jCCBugx+gVjevp2TMXFtgxvPSee3OM= -cloud.google.com/go/clouddms v1.4.0/go.mod h1:Eh7sUGCC+aKry14O1NRljhjyrr0NFC0G2cjwX0cByRk= -cloud.google.com/go/clouddms v1.5.0/go.mod h1:QSxQnhikCLUw13iAbffF2CZxAER3xDGNHjsTAkQJcQA= -cloud.google.com/go/clouddms v1.6.1/go.mod h1:Ygo1vL52Ov4TBZQquhz5fiw2CQ58gvu+PlS6PVXCpZI= cloud.google.com/go/cloudtasks v1.5.0/go.mod h1:fD92REy1x5woxkKEkLdvavGnPJGEn8Uic9nWuLzqCpY= cloud.google.com/go/cloudtasks v1.6.0/go.mod h1:C6Io+sxuke9/KNRkbQpihnW93SWDU3uXt92nu85HkYI= -cloud.google.com/go/cloudtasks v1.7.0/go.mod h1:ImsfdYWwlWNJbdgPIIGJWC+gemEGTBK/SunNQQNCAb4= -cloud.google.com/go/cloudtasks v1.8.0/go.mod h1:gQXUIwCSOI4yPVK7DgTVFiiP0ZW/eQkydWzwVMdHxrI= -cloud.google.com/go/cloudtasks v1.9.0/go.mod h1:w+EyLsVkLWHcOaqNEyvcKAsWp9p29dL6uL9Nst1cI7Y= -cloud.google.com/go/cloudtasks v1.10.0/go.mod h1:NDSoTLkZ3+vExFEWu2UJV1arUyzVDAiZtdWcsUyNwBs= -cloud.google.com/go/cloudtasks v1.11.1/go.mod h1:a9udmnou9KO2iulGscKR0qBYjreuX8oHwpmFsKspEvM= cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow= cloud.google.com/go/compute v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM= cloud.google.com/go/compute v1.5.0/go.mod h1:9SMHyhJlzhlkJqrPAc839t2BZFTSk6Jdj6mkzQJeu0M= @@ -191,439 +70,101 @@ cloud.google.com/go/compute v1.6.0/go.mod h1:T29tfhtVbq1wvAPo0E3+7vhgmkOYeXjhFvz cloud.google.com/go/compute v1.6.1/go.mod h1:g85FgpzFvNULZ+S8AYq87axRKuf2Kh7deLqV/jJ3thU= cloud.google.com/go/compute v1.7.0/go.mod h1:435lt8av5oL9P3fv1OEzSbSUe+ybHXGMPQHHZWZxy9U= cloud.google.com/go/compute v1.10.0/go.mod h1:ER5CLbMxl90o2jtNbGSbtfOpQKR0t15FOtRsugnLrlU= -cloud.google.com/go/compute v1.12.0/go.mod h1:e8yNOBcBONZU1vJKCvCoDw/4JQsA0dpM4x/6PIIOocU= 
-cloud.google.com/go/compute v1.12.1/go.mod h1:e8yNOBcBONZU1vJKCvCoDw/4JQsA0dpM4x/6PIIOocU= -cloud.google.com/go/compute v1.13.0/go.mod h1:5aPTS0cUNMIc1CE546K+Th6weJUNQErARyZtRXDJ8GE= -cloud.google.com/go/compute v1.14.0/go.mod h1:YfLtxrj9sU4Yxv+sXzZkyPjEyPBZfXHUvjxega5vAdo= -cloud.google.com/go/compute v1.18.0/go.mod h1:1X7yHxec2Ga+Ss6jPyjxRxpu2uu7PLgsOVXvgU0yacs= -cloud.google.com/go/compute v1.19.0/go.mod h1:rikpw2y+UMidAe9tISo04EHNOIf42RLYF/q8Bs93scU= -cloud.google.com/go/compute v1.19.3/go.mod h1:qxvISKp/gYnXkSAD1ppcSOveRAmzxicEv/JlizULFrI= -cloud.google.com/go/compute v1.20.1/go.mod h1:4tCnrn48xsqlwSAiLf1HXMQk8CONslYbdiEZc9FEIbM= -cloud.google.com/go/compute/metadata v0.1.0/go.mod h1:Z1VN+bulIf6bt4P/C37K4DyZYZEXYonfTBHHFPO/4UU= -cloud.google.com/go/compute/metadata v0.2.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k= -cloud.google.com/go/compute/metadata v0.2.1/go.mod h1:jgHgmJd2RKBGzXqF5LR2EZMGxBkeanZ9wwa75XHJgOM= +cloud.google.com/go/compute v1.23.3 h1:6sVlXXBmbd7jNX0Ipq0trII3e4n1/MsADLK6a+aiVlk= +cloud.google.com/go/compute v1.23.3/go.mod h1:VCgBUoMnIVIR0CscqQiPJLAG25E3ZRZMzcFZeQ+h8CI= +cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= -cloud.google.com/go/contactcenterinsights v1.3.0/go.mod h1:Eu2oemoePuEFc/xKFPjbTuPSj0fYJcPls9TFlPNnHHY= -cloud.google.com/go/contactcenterinsights v1.4.0/go.mod h1:L2YzkGbPsv+vMQMCADxJoT9YiTTnSEd6fEvCeHTYVck= -cloud.google.com/go/contactcenterinsights v1.6.0/go.mod h1:IIDlT6CLcDoyv79kDv8iWxMSTZhLxSCofVV5W6YFM/w= -cloud.google.com/go/contactcenterinsights v1.9.1/go.mod h1:bsg/R7zGLYMVxFFzfh9ooLTruLRCG9fnzhH9KznHhbM= -cloud.google.com/go/container v1.6.0/go.mod h1:Xazp7GjJSeUYo688S+6J5V+n/t+G5sKBTFkKNudGRxg= -cloud.google.com/go/container v1.7.0/go.mod h1:Dp5AHtmothHGX3DwwIHPgq45Y8KmNsgN3amoYfxVkLo= -cloud.google.com/go/container v1.13.1/go.mod 
h1:6wgbMPeQRw9rSnKBCAJXnds3Pzj03C4JHamr8asWKy4= -cloud.google.com/go/container v1.14.0/go.mod h1:3AoJMPhHfLDxLvrlVWaK57IXzaPnLaZq63WX59aQBfM= -cloud.google.com/go/container v1.15.0/go.mod h1:ft+9S0WGjAyjDggg5S06DXj+fHJICWg8L7isCQe9pQA= -cloud.google.com/go/container v1.22.1/go.mod h1:lTNExE2R7f+DLbAN+rJiKTisauFCaoDq6NURZ83eVH4= cloud.google.com/go/containeranalysis v0.5.1/go.mod h1:1D92jd8gRR/c0fGMlymRgxWD3Qw9C1ff6/T7mLgVL8I= cloud.google.com/go/containeranalysis v0.6.0/go.mod h1:HEJoiEIu+lEXM+k7+qLCci0h33lX3ZqoYFdmPcoO7s4= -cloud.google.com/go/containeranalysis v0.7.0/go.mod h1:9aUL+/vZ55P2CXfuZjS4UjQ9AgXoSw8Ts6lemfmxBxI= -cloud.google.com/go/containeranalysis v0.9.0/go.mod h1:orbOANbwk5Ejoom+s+DUCTTJ7IBdBQJDcSylAx/on9s= -cloud.google.com/go/containeranalysis v0.10.1/go.mod h1:Ya2jiILITMY68ZLPaogjmOMNkwsDrWBSTyBubGXO7j0= cloud.google.com/go/datacatalog v1.3.0/go.mod h1:g9svFY6tuR+j+hrTw3J2dNcmI0dzmSiyOzm8kpLq0a0= cloud.google.com/go/datacatalog v1.5.0/go.mod h1:M7GPLNQeLfWqeIm3iuiruhPzkt65+Bx8dAKvScX8jvs= cloud.google.com/go/datacatalog v1.6.0/go.mod h1:+aEyF8JKg+uXcIdAmmaMUmZ3q1b/lKLtXCmXdnc0lbc= -cloud.google.com/go/datacatalog v1.7.0/go.mod h1:9mEl4AuDYWw81UGc41HonIHH7/sn52H0/tc8f8ZbZIE= -cloud.google.com/go/datacatalog v1.8.0/go.mod h1:KYuoVOv9BM8EYz/4eMFxrr4DUKhGIOXxZoKYF5wdISM= -cloud.google.com/go/datacatalog v1.8.1/go.mod h1:RJ58z4rMp3gvETA465Vg+ag8BGgBdnRPEMMSTr5Uv+M= -cloud.google.com/go/datacatalog v1.12.0/go.mod h1:CWae8rFkfp6LzLumKOnmVh4+Zle4A3NXLzVJ1d1mRm0= -cloud.google.com/go/datacatalog v1.13.0/go.mod h1:E4Rj9a5ZtAxcQJlEBTLgMTphfP11/lNaAshpoBgemX8= -cloud.google.com/go/datacatalog v1.14.0/go.mod h1:h0PrGtlihoutNMp/uvwhawLQ9+c63Kz65UFqh49Yo+E= -cloud.google.com/go/datacatalog v1.14.1/go.mod h1:d2CevwTG4yedZilwe+v3E3ZBDRMobQfSG/a6cCCN5R4= cloud.google.com/go/dataflow v0.6.0/go.mod h1:9QwV89cGoxjjSR9/r7eFDqqjtvbKxAK2BaYU6PVk9UM= cloud.google.com/go/dataflow v0.7.0/go.mod h1:PX526vb4ijFMesO1o202EaUmouZKBpjHsTlCtB4parQ= -cloud.google.com/go/dataflow 
v0.8.0/go.mod h1:Rcf5YgTKPtQyYz8bLYhFoIV/vP39eL7fWNcSOyFfLJE= -cloud.google.com/go/dataflow v0.9.1/go.mod h1:Wp7s32QjYuQDWqJPFFlnBKhkAtiFpMTdg00qGbnIHVw= cloud.google.com/go/dataform v0.3.0/go.mod h1:cj8uNliRlHpa6L3yVhDOBrUXH+BPAO1+KFMQQNSThKo= cloud.google.com/go/dataform v0.4.0/go.mod h1:fwV6Y4Ty2yIFL89huYlEkwUPtS7YZinZbzzj5S9FzCE= -cloud.google.com/go/dataform v0.5.0/go.mod h1:GFUYRe8IBa2hcomWplodVmUx/iTL0FrsauObOM3Ipr0= -cloud.google.com/go/dataform v0.6.0/go.mod h1:QPflImQy33e29VuapFdf19oPbE4aYTJxr31OAPV+ulA= -cloud.google.com/go/dataform v0.7.0/go.mod h1:7NulqnVozfHvWUBpMDfKMUESr+85aJsC/2O0o3jWPDE= -cloud.google.com/go/dataform v0.8.1/go.mod h1:3BhPSiw8xmppbgzeBbmDvmSWlwouuJkXsXsb8UBih9M= -cloud.google.com/go/datafusion v1.4.0/go.mod h1:1Zb6VN+W6ALo85cXnM1IKiPw+yQMKMhB9TsTSRDo/38= -cloud.google.com/go/datafusion v1.5.0/go.mod h1:Kz+l1FGHB0J+4XF2fud96WMmRiq/wj8N9u007vyXZ2w= -cloud.google.com/go/datafusion v1.6.0/go.mod h1:WBsMF8F1RhSXvVM8rCV3AeyWVxcC2xY6vith3iw3S+8= -cloud.google.com/go/datafusion v1.7.1/go.mod h1:KpoTBbFmoToDExJUso/fcCiguGDk7MEzOWXUsJo0wsI= cloud.google.com/go/datalabeling v0.5.0/go.mod h1:TGcJ0G2NzcsXSE/97yWjIZO0bXj0KbVlINXMG9ud42I= cloud.google.com/go/datalabeling v0.6.0/go.mod h1:WqdISuk/+WIGeMkpw/1q7bK/tFEZxsrFJOJdY2bXvTQ= -cloud.google.com/go/datalabeling v0.7.0/go.mod h1:WPQb1y08RJbmpM3ww0CSUAGweL0SxByuW2E+FU+wXcM= -cloud.google.com/go/datalabeling v0.8.1/go.mod h1:XS62LBSVPbYR54GfYQsPXZjTW8UxCK2fkDciSrpRFdY= -cloud.google.com/go/dataplex v1.3.0/go.mod h1:hQuRtDg+fCiFgC8j0zV222HvzFQdRd+SVX8gdmFcZzA= -cloud.google.com/go/dataplex v1.4.0/go.mod h1:X51GfLXEMVJ6UN47ESVqvlsRplbLhcsAt0kZCCKsU0A= -cloud.google.com/go/dataplex v1.5.2/go.mod h1:cVMgQHsmfRoI5KFYq4JtIBEUbYwc3c7tXmIDhRmNNVQ= -cloud.google.com/go/dataplex v1.6.0/go.mod h1:bMsomC/aEJOSpHXdFKFGQ1b0TDPIeL28nJObeO1ppRs= -cloud.google.com/go/dataplex v1.8.1/go.mod h1:7TyrDT6BCdI8/38Uvp0/ZxBslOslP2X2MPDucliyvSE= -cloud.google.com/go/dataproc v1.7.0/go.mod 
h1:CKAlMjII9H90RXaMpSxQ8EU6dQx6iAYNPcYPOkSbi8s= -cloud.google.com/go/dataproc v1.8.0/go.mod h1:5OW+zNAH0pMpw14JVrPONsxMQYMBqJuzORhIBfBn9uI= -cloud.google.com/go/dataproc v1.12.0/go.mod h1:zrF3aX0uV3ikkMz6z4uBbIKyhRITnxvr4i3IjKsKrw4= cloud.google.com/go/dataqna v0.5.0/go.mod h1:90Hyk596ft3zUQ8NkFfvICSIfHFh1Bc7C4cK3vbhkeo= cloud.google.com/go/dataqna v0.6.0/go.mod h1:1lqNpM7rqNLVgWBJyk5NF6Uen2PHym0jtVJonplVsDA= -cloud.google.com/go/dataqna v0.7.0/go.mod h1:Lx9OcIIeqCrw1a6KdO3/5KMP1wAmTc0slZWwP12Qq3c= -cloud.google.com/go/dataqna v0.8.1/go.mod h1:zxZM0Bl6liMePWsHA8RMGAfmTG34vJMapbHAxQ5+WA8= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/datastore v1.10.0/go.mod h1:PC5UzAmDEkAmkfaknstTYbNpgE49HAgW2J1gcgUfmdM= -cloud.google.com/go/datastore v1.11.0/go.mod h1:TvGxBIHCS50u8jzG+AW/ppf87v1of8nwzFNgEZU1D3c= -cloud.google.com/go/datastore v1.12.0/go.mod h1:KjdB88W897MRITkvWWJrg2OUtrR5XVj1EoLgSp6/N70= cloud.google.com/go/datastream v1.2.0/go.mod h1:i/uTP8/fZwgATHS/XFu0TcNUhuA0twZxxQ3EyCUQMwo= cloud.google.com/go/datastream v1.3.0/go.mod h1:cqlOX8xlyYF/uxhiKn6Hbv6WjwPPuI9W2M9SAXwaLLQ= -cloud.google.com/go/datastream v1.4.0/go.mod h1:h9dpzScPhDTs5noEMQVWP8Wx8AFBRyS0s8KWPx/9r0g= -cloud.google.com/go/datastream v1.5.0/go.mod h1:6TZMMNPwjUqZHBKPQ1wwXpb0d5VDVPl2/XoS5yi88q4= -cloud.google.com/go/datastream v1.6.0/go.mod h1:6LQSuswqLa7S4rPAOZFVjHIG3wJIjZcZrw8JDEDJuIs= -cloud.google.com/go/datastream v1.7.0/go.mod h1:uxVRMm2elUSPuh65IbZpzJNMbuzkcvu5CjMqVIUHrww= -cloud.google.com/go/datastream v1.9.1/go.mod h1:hqnmr8kdUBmrnk65k5wNRoHSCYksvpdZIcZIEl8h43Q= -cloud.google.com/go/deploy v1.4.0/go.mod h1:5Xghikd4VrmMLNaF6FiRFDlHb59VM59YoDQnOUdsH/c= -cloud.google.com/go/deploy v1.5.0/go.mod h1:ffgdD0B89tToyW/U/D2eL0jN2+IEV/3EMuXHA0l4r+s= -cloud.google.com/go/deploy v1.6.0/go.mod h1:f9PTHehG/DjCom3QH0cntOVRm93uGBDt2vKzAPwpXQI= 
-cloud.google.com/go/deploy v1.8.0/go.mod h1:z3myEJnA/2wnB4sgjqdMfgxCA0EqC3RBTNcVPs93mtQ= -cloud.google.com/go/deploy v1.11.0/go.mod h1:tKuSUV5pXbn67KiubiUNUejqLs4f5cxxiCNCeyl0F2g= cloud.google.com/go/dialogflow v1.15.0/go.mod h1:HbHDWs33WOGJgn6rfzBW1Kv807BE3O1+xGbn59zZWI4= cloud.google.com/go/dialogflow v1.16.1/go.mod h1:po6LlzGfK+smoSmTBnbkIZY2w8ffjz/RcGSS+sh1el0= cloud.google.com/go/dialogflow v1.17.0/go.mod h1:YNP09C/kXA1aZdBgC/VtXX74G/TKn7XVCcVumTflA+8= -cloud.google.com/go/dialogflow v1.18.0/go.mod h1:trO7Zu5YdyEuR+BhSNOqJezyFQ3aUzz0njv7sMx/iek= -cloud.google.com/go/dialogflow v1.19.0/go.mod h1:JVmlG1TwykZDtxtTXujec4tQ+D8SBFMoosgy+6Gn0s0= -cloud.google.com/go/dialogflow v1.29.0/go.mod h1:b+2bzMe+k1s9V+F2jbJwpHPzrnIyHihAdRFMtn2WXuM= -cloud.google.com/go/dialogflow v1.31.0/go.mod h1:cuoUccuL1Z+HADhyIA7dci3N5zUssgpBJmCzI6fNRB4= -cloud.google.com/go/dialogflow v1.32.0/go.mod h1:jG9TRJl8CKrDhMEcvfcfFkkpp8ZhgPz3sBGmAUYJ2qE= -cloud.google.com/go/dialogflow v1.38.0/go.mod h1:L7jnH+JL2mtmdChzAIcXQHXMvQkE3U4hTaNltEuxXn4= -cloud.google.com/go/dlp v1.6.0/go.mod h1:9eyB2xIhpU0sVwUixfBubDoRwP+GjeUoxxeueZmqvmM= -cloud.google.com/go/dlp v1.7.0/go.mod h1:68ak9vCiMBjbasxeVD17hVPxDEck+ExiHavX8kiHG+Q= -cloud.google.com/go/dlp v1.9.0/go.mod h1:qdgmqgTyReTz5/YNSSuueR8pl7hO0o9bQ39ZhtgkWp4= -cloud.google.com/go/dlp v1.10.1/go.mod h1:IM8BWz1iJd8njcNcG0+Kyd9OPnqnRNkDV8j42VT5KOI= cloud.google.com/go/documentai v1.7.0/go.mod h1:lJvftZB5NRiFSX4moiye1SMxHx0Bc3x1+p9e/RfXYiU= cloud.google.com/go/documentai v1.8.0/go.mod h1:xGHNEB7CtsnySCNrCFdCyyMz44RhFEEX2Q7UD0c5IhU= -cloud.google.com/go/documentai v1.9.0/go.mod h1:FS5485S8R00U10GhgBC0aNGrJxBP8ZVpEeJ7PQDZd6k= -cloud.google.com/go/documentai v1.10.0/go.mod h1:vod47hKQIPeCfN2QS/jULIvQTugbmdc0ZvxxfQY1bg4= -cloud.google.com/go/documentai v1.16.0/go.mod h1:o0o0DLTEZ+YnJZ+J4wNfTxmDVyrkzFvttBXXtYRMHkM= -cloud.google.com/go/documentai v1.18.0/go.mod h1:F6CK6iUH8J81FehpskRmhLq/3VlwQvb7TvwOceQ2tbs= -cloud.google.com/go/documentai v1.20.0/go.mod 
h1:yJkInoMcK0qNAEdRnqY/D5asy73tnPe88I1YTZT+a8E= cloud.google.com/go/domains v0.6.0/go.mod h1:T9Rz3GasrpYk6mEGHh4rymIhjlnIuB4ofT1wTxDeT4Y= cloud.google.com/go/domains v0.7.0/go.mod h1:PtZeqS1xjnXuRPKE/88Iru/LdfoRyEHYA9nFQf4UKpg= -cloud.google.com/go/domains v0.8.0/go.mod h1:M9i3MMDzGFXsydri9/vW+EWz9sWb4I6WyHqdlAk0idE= -cloud.google.com/go/domains v0.9.1/go.mod h1:aOp1c0MbejQQ2Pjf1iJvnVyT+z6R6s8pX66KaCSDYfE= cloud.google.com/go/edgecontainer v0.1.0/go.mod h1:WgkZ9tp10bFxqO8BLPqv2LlfmQF1X8lZqwW4r1BTajk= cloud.google.com/go/edgecontainer v0.2.0/go.mod h1:RTmLijy+lGpQ7BXuTDa4C4ssxyXT34NIuHIgKuP4s5w= -cloud.google.com/go/edgecontainer v0.3.0/go.mod h1:FLDpP4nykgwwIfcLt6zInhprzw0lEi2P1fjO6Ie0qbc= -cloud.google.com/go/edgecontainer v1.0.0/go.mod h1:cttArqZpBB2q58W/upSG++ooo6EsblxDIolxa3jSjbY= -cloud.google.com/go/edgecontainer v1.1.1/go.mod h1:O5bYcS//7MELQZs3+7mabRqoWQhXCzenBu0R8bz2rwk= -cloud.google.com/go/errorreporting v0.3.0/go.mod h1:xsP2yaAp+OAW4OIm60An2bbLpqIhKXdWR/tawvl7QzU= -cloud.google.com/go/essentialcontacts v1.3.0/go.mod h1:r+OnHa5jfj90qIfZDO/VztSFqbQan7HV75p8sA+mdGI= -cloud.google.com/go/essentialcontacts v1.4.0/go.mod h1:8tRldvHYsmnBCHdFpvU+GL75oWiBKl80BiqlFh9tp+8= -cloud.google.com/go/essentialcontacts v1.5.0/go.mod h1:ay29Z4zODTuwliK7SnX8E86aUF2CTzdNtvv42niCX0M= -cloud.google.com/go/essentialcontacts v1.6.2/go.mod h1:T2tB6tX+TRak7i88Fb2N9Ok3PvY3UNbUsMag9/BARh4= -cloud.google.com/go/eventarc v1.7.0/go.mod h1:6ctpF3zTnaQCxUjHUdcfgcA1A2T309+omHZth7gDfmc= -cloud.google.com/go/eventarc v1.8.0/go.mod h1:imbzxkyAU4ubfsaKYdQg04WS1NvncblHEup4kvF+4gw= -cloud.google.com/go/eventarc v1.10.0/go.mod h1:u3R35tmZ9HvswGRBnF48IlYgYeBcPUCjkr4BTdem2Kw= -cloud.google.com/go/eventarc v1.11.0/go.mod h1:PyUjsUKPWoRBCHeOxZd/lbOOjahV41icXyUY5kSTvVY= -cloud.google.com/go/eventarc v1.12.1/go.mod h1:mAFCW6lukH5+IZjkvrEss+jmt2kOdYlN8aMx3sRJiAI= -cloud.google.com/go/filestore v1.3.0/go.mod h1:+qbvHGvXU1HaKX2nD0WEPo92TP/8AQuCVEBXNY9z0+w= -cloud.google.com/go/filestore v1.4.0/go.mod 
h1:PaG5oDfo9r224f8OYXURtAsY+Fbyq/bLYoINEK8XQAI= -cloud.google.com/go/filestore v1.5.0/go.mod h1:FqBXDWBp4YLHqRnVGveOkHDf8svj9r5+mUDLupOWEDs= -cloud.google.com/go/filestore v1.6.0/go.mod h1:di5unNuss/qfZTw2U9nhFqo8/ZDSc466dre85Kydllg= -cloud.google.com/go/filestore v1.7.1/go.mod h1:y10jsorq40JJnjR/lQ8AfFbbcGlw3g+Dp8oN7i7FjV4= -cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= -cloud.google.com/go/firestore v1.9.0/go.mod h1:HMkjKHNTtRyZNiMzu7YAsLr9K3X2udY2AMwDaMEQiiE= -cloud.google.com/go/firestore v1.11.0/go.mod h1:b38dKhgzlmNNGTNZZwe7ZRFEuRab1Hay3/DBsIGKKy4= cloud.google.com/go/functions v1.6.0/go.mod h1:3H1UA3qiIPRWD7PeZKLvHZ9SaQhR26XIJcC0A5GbvAk= cloud.google.com/go/functions v1.7.0/go.mod h1:+d+QBcWM+RsrgZfV9xo6KfA1GlzJfxcfZcRPEhDDfzg= -cloud.google.com/go/functions v1.8.0/go.mod h1:RTZ4/HsQjIqIYP9a9YPbU+QFoQsAlYgrwOXJWHn1POY= -cloud.google.com/go/functions v1.9.0/go.mod h1:Y+Dz8yGguzO3PpIjhLTbnqV1CWmgQ5UwtlpzoyquQ08= -cloud.google.com/go/functions v1.10.0/go.mod h1:0D3hEOe3DbEvCXtYOZHQZmD+SzYsi1YbI7dGvHfldXw= -cloud.google.com/go/functions v1.12.0/go.mod h1:AXWGrF3e2C/5ehvwYo/GH6O5s09tOPksiKhz+hH8WkA= -cloud.google.com/go/functions v1.13.0/go.mod h1:EU4O007sQm6Ef/PwRsI8N2umygGqPBS/IZQKBQBcJ3c= -cloud.google.com/go/functions v1.15.1/go.mod h1:P5yNWUTkyU+LvW/S9O6V+V423VZooALQlqoXdoPz5AE= cloud.google.com/go/gaming v1.5.0/go.mod h1:ol7rGcxP/qHTRQE/RO4bxkXq+Fix0j6D4LFPzYTIrDM= cloud.google.com/go/gaming v1.6.0/go.mod h1:YMU1GEvA39Qt3zWGyAVA9bpYz/yAhTvaQ1t2sK4KPUA= -cloud.google.com/go/gaming v1.7.0/go.mod h1:LrB8U7MHdGgFG851iHAfqUdLcKBdQ55hzXy9xBJz0+w= -cloud.google.com/go/gaming v1.8.0/go.mod h1:xAqjS8b7jAVW0KFYeRUxngo9My3f33kFmua++Pi+ggM= -cloud.google.com/go/gaming v1.9.0/go.mod h1:Fc7kEmCObylSWLO334NcO+O9QMDyz+TKC4v1D7X+Bc0= -cloud.google.com/go/gaming v1.10.1/go.mod h1:XQQvtfP8Rb9Rxnxm5wFVpAp9zCQkJi2bLIb7iHGwB3s= -cloud.google.com/go/gkebackup v0.2.0/go.mod h1:XKvv/4LfG829/B8B7xRkk8zRrOEbKtEam6yNfuQNH60= 
-cloud.google.com/go/gkebackup v0.3.0/go.mod h1:n/E671i1aOQvUxT541aTkCwExO/bTer2HDlj4TsBRAo= -cloud.google.com/go/gkebackup v0.4.0/go.mod h1:byAyBGUwYGEEww7xsbnUTBHIYcOPy/PgUWUtOeRm9Vg= cloud.google.com/go/gkeconnect v0.5.0/go.mod h1:c5lsNAg5EwAy7fkqX/+goqFsU1Da/jQFqArp+wGNr/o= cloud.google.com/go/gkeconnect v0.6.0/go.mod h1:Mln67KyU/sHJEBY8kFZ0xTeyPtzbq9StAVvEULYK16A= -cloud.google.com/go/gkeconnect v0.7.0/go.mod h1:SNfmVqPkaEi3bF/B3CNZOAYPYdg7sU+obZ+QTky2Myw= -cloud.google.com/go/gkeconnect v0.8.1/go.mod h1:KWiK1g9sDLZqhxB2xEuPV8V9NYzrqTUmQR9shJHpOZw= cloud.google.com/go/gkehub v0.9.0/go.mod h1:WYHN6WG8w9bXU0hqNxt8rm5uxnk8IH+lPY9J2TV7BK0= cloud.google.com/go/gkehub v0.10.0/go.mod h1:UIPwxI0DsrpsVoWpLB0stwKCP+WFVG9+y977wO+hBH0= -cloud.google.com/go/gkehub v0.11.0/go.mod h1:JOWHlmN+GHyIbuWQPl47/C2RFhnFKH38jH9Ascu3n0E= -cloud.google.com/go/gkehub v0.12.0/go.mod h1:djiIwwzTTBrF5NaXCGv3mf7klpEMcST17VBTVVDcuaw= -cloud.google.com/go/gkehub v0.14.1/go.mod h1:VEXKIJZ2avzrbd7u+zeMtW00Y8ddk/4V9511C9CQGTY= -cloud.google.com/go/gkemulticloud v0.3.0/go.mod h1:7orzy7O0S+5kq95e4Hpn7RysVA7dPs8W/GgfUtsPbrA= -cloud.google.com/go/gkemulticloud v0.4.0/go.mod h1:E9gxVBnseLWCk24ch+P9+B2CoDFJZTyIgLKSalC7tuI= -cloud.google.com/go/gkemulticloud v0.5.0/go.mod h1:W0JDkiyi3Tqh0TJr//y19wyb1yf8llHVto2Htf2Ja3Y= -cloud.google.com/go/gkemulticloud v0.6.1/go.mod h1:kbZ3HKyTsiwqKX7Yw56+wUGwwNZViRnxWK2DVknXWfw= cloud.google.com/go/grafeas v0.2.0/go.mod h1:KhxgtF2hb0P191HlY5besjYm6MqTSTj3LSI+M+ByZHc= -cloud.google.com/go/grafeas v0.3.0/go.mod h1:P7hgN24EyONOTMyeJH6DxG4zD7fwiYa5Q6GUgyFSOU8= -cloud.google.com/go/gsuiteaddons v1.3.0/go.mod h1:EUNK/J1lZEZO8yPtykKxLXI6JSVN2rg9bN8SXOa0bgM= -cloud.google.com/go/gsuiteaddons v1.4.0/go.mod h1:rZK5I8hht7u7HxFQcFei0+AtfS9uSushomRlg+3ua1o= -cloud.google.com/go/gsuiteaddons v1.5.0/go.mod h1:TFCClYLd64Eaa12sFVmUyG62tk4mdIsI7pAnSXRkcFo= -cloud.google.com/go/gsuiteaddons v1.6.1/go.mod h1:CodrdOqRZcLp5WOwejHWYBjZvfY0kOphkAKpF/3qdZY= -cloud.google.com/go/iam 
v0.1.0/go.mod h1:vcUNEa0pEm0qRVpmWepWaFMIAI8/hjB9mO8rNCJtF6c= cloud.google.com/go/iam v0.3.0/go.mod h1:XzJPvDayI+9zsASAFO68Hk07u3z+f+JrT2xXNdp4bnY= cloud.google.com/go/iam v0.5.0/go.mod h1:wPU9Vt0P4UmCux7mqtRu6jcpPAb74cP1fh50J3QpkUc= -cloud.google.com/go/iam v0.6.0/go.mod h1:+1AH33ueBne5MzYccyMHtEKqLE4/kJOibtffMHDMFMc= -cloud.google.com/go/iam v0.7.0/go.mod h1:H5Br8wRaDGNc8XP3keLc4unfUUZeyH3Sfl9XpQEYOeg= -cloud.google.com/go/iam v0.8.0/go.mod h1:lga0/y3iH6CX7sYqypWJ33hf7kkfXJag67naqGESjkE= -cloud.google.com/go/iam v0.11.0/go.mod h1:9PiLDanza5D+oWFZiH1uG+RnRCfEGKoyl6yo4cgWZGY= -cloud.google.com/go/iam v0.12.0/go.mod h1:knyHGviacl11zrtZUoDuYpDgLjvr28sLQaG0YB2GYAY= -cloud.google.com/go/iam v0.13.0/go.mod h1:ljOg+rcNfzZ5d6f1nAUJ8ZIxOaZUVoS14bKCtaLZ/D0= -cloud.google.com/go/iam v1.0.1/go.mod h1:yR3tmSL8BcZB4bxByRv2jkSIahVmCtfKZwLYGBalRE8= -cloud.google.com/go/iam v1.1.0/go.mod h1:nxdHjaKfCr7fNYx/HJMM8LgiMugmveWlkatear5gVyk= -cloud.google.com/go/iap v1.4.0/go.mod h1:RGFwRJdihTINIe4wZ2iCP0zF/qu18ZwyKxrhMhygBEc= -cloud.google.com/go/iap v1.5.0/go.mod h1:UH/CGgKd4KyohZL5Pt0jSKE4m3FR51qg6FKQ/z/Ix9A= -cloud.google.com/go/iap v1.6.0/go.mod h1:NSuvI9C/j7UdjGjIde7t7HBz+QTwBcapPE07+sSRcLk= -cloud.google.com/go/iap v1.7.0/go.mod h1:beqQx56T9O1G1yNPph+spKpNibDlYIiIixiqsQXxLIo= -cloud.google.com/go/iap v1.7.1/go.mod h1:WapEwPc7ZxGt2jFGB/C/bm+hP0Y6NXzOYGjpPnmMS74= -cloud.google.com/go/iap v1.8.1/go.mod h1:sJCbeqg3mvWLqjZNsI6dfAtbbV1DL2Rl7e1mTyXYREQ= -cloud.google.com/go/ids v1.1.0/go.mod h1:WIuwCaYVOzHIj2OhN9HAwvW+DBdmUAdcWlFxRl+KubM= -cloud.google.com/go/ids v1.2.0/go.mod h1:5WXvp4n25S0rA/mQWAg1YEEBBq6/s+7ml1RDCW1IrcY= -cloud.google.com/go/ids v1.3.0/go.mod h1:JBdTYwANikFKaDP6LtW5JAi4gubs57SVNQjemdt6xV4= -cloud.google.com/go/ids v1.4.1/go.mod h1:np41ed8YMU8zOgv53MMMoCntLTn2lF+SUzlM+O3u/jw= -cloud.google.com/go/iot v1.3.0/go.mod h1:r7RGh2B61+B8oz0AGE+J72AhA0G7tdXItODWsaA2oLs= -cloud.google.com/go/iot v1.4.0/go.mod h1:dIDxPOn0UvNDUMD8Ger7FIaTuvMkj+aGk94RPP0iV+g= 
-cloud.google.com/go/iot v1.5.0/go.mod h1:mpz5259PDl3XJthEmh9+ap0affn/MqNSP4My77Qql9o= -cloud.google.com/go/iot v1.6.0/go.mod h1:IqdAsmE2cTYYNO1Fvjfzo9po179rAtJeVGUvkLN3rLE= -cloud.google.com/go/iot v1.7.1/go.mod h1:46Mgw7ev1k9KqK1ao0ayW9h0lI+3hxeanz+L1zmbbbk= -cloud.google.com/go/kms v1.4.0/go.mod h1:fajBHndQ+6ubNw6Ss2sSd+SWvjL26RNo/dr7uxsnnOA= -cloud.google.com/go/kms v1.5.0/go.mod h1:QJS2YY0eJGBg3mnDfuaCyLauWwBJiHRboYxJ++1xJNg= -cloud.google.com/go/kms v1.6.0/go.mod h1:Jjy850yySiasBUDi6KFUwUv2n1+o7QZFyuUJg6OgjA0= -cloud.google.com/go/kms v1.8.0/go.mod h1:4xFEhYFqvW+4VMELtZyxomGSYtSQKzM178ylFW4jMAg= -cloud.google.com/go/kms v1.9.0/go.mod h1:qb1tPTgfF9RQP8e1wq4cLFErVuTJv7UsSC915J8dh3w= -cloud.google.com/go/kms v1.10.0/go.mod h1:ng3KTUtQQU9bPX3+QGLsflZIHlkbn8amFAMY63m8d24= -cloud.google.com/go/kms v1.10.1/go.mod h1:rIWk/TryCkR59GMC3YtHtXeLzd634lBbKenvyySAyYI= -cloud.google.com/go/kms v1.11.0/go.mod h1:hwdiYC0xjnWsKQQCQQmIQnS9asjYVSK6jtXm+zFqXLM= -cloud.google.com/go/kms v1.12.1/go.mod h1:c9J991h5DTl+kg7gi3MYomh12YEENGrf48ee/N/2CDM= +cloud.google.com/go/iam v1.1.5 h1:1jTsCu4bcsNsE4iiqNT5SHwrDRCfRmIaaaVFhRveTJI= +cloud.google.com/go/iam v1.1.5/go.mod h1:rB6P/Ic3mykPbFio+vo7403drjlgvoWfYpJhMXEbzv8= cloud.google.com/go/language v1.4.0/go.mod h1:F9dRpNFQmJbkaop6g0JhSBXCNlO90e1KWx5iDdxbWic= cloud.google.com/go/language v1.6.0/go.mod h1:6dJ8t3B+lUYfStgls25GusK04NLh3eDLQnWM3mdEbhI= -cloud.google.com/go/language v1.7.0/go.mod h1:DJ6dYN/W+SQOjF8e1hLQXMF21AkH2w9wiPzPCJa2MIE= -cloud.google.com/go/language v1.8.0/go.mod h1:qYPVHf7SPoNNiCL2Dr0FfEFNil1qi3pQEyygwpgVKB8= -cloud.google.com/go/language v1.9.0/go.mod h1:Ns15WooPM5Ad/5no/0n81yUetis74g3zrbeJBE+ptUY= -cloud.google.com/go/language v1.10.1/go.mod h1:CPp94nsdVNiQEt1CNjF5WkTcisLiHPyIbMhvR8H2AW0= cloud.google.com/go/lifesciences v0.5.0/go.mod h1:3oIKy8ycWGPUyZDR/8RNnTOYevhaMLqh5vLUXs9zvT8= cloud.google.com/go/lifesciences v0.6.0/go.mod h1:ddj6tSX/7BOnhxCSd3ZcETvtNr8NZ6t/iPhY2Tyfu08= -cloud.google.com/go/lifesciences 
v0.8.0/go.mod h1:lFxiEOMqII6XggGbOnKiyZ7IBwoIqA84ClvoezaA/bo= -cloud.google.com/go/lifesciences v0.9.1/go.mod h1:hACAOd1fFbCGLr/+weUKRAJas82Y4vrL3O5326N//Wc= -cloud.google.com/go/logging v1.6.1/go.mod h1:5ZO0mHHbvm8gEmeEUHrmDlTDSu5imF6MUP9OfilNXBw= -cloud.google.com/go/logging v1.7.0/go.mod h1:3xjP2CjkM3ZkO73aj4ASA5wRPGGCRrPIAeNqVNkzY8M= -cloud.google.com/go/longrunning v0.1.1/go.mod h1:UUFxuDWkv22EuY93jjmDMFT5GPQKeFVJBIF6QlTqdsE= -cloud.google.com/go/longrunning v0.3.0/go.mod h1:qth9Y41RRSUE69rDcOn6DdK3HfQfsUI0YSmW3iIlLJc= -cloud.google.com/go/longrunning v0.4.1/go.mod h1:4iWDqhBZ70CvZ6BfETbvam3T8FMvLK+eFj0E6AaRQTo= -cloud.google.com/go/longrunning v0.4.2/go.mod h1:OHrnaYyLUV6oqwh0xiS7e5sLQhP1m0QU9R+WhGDMgIQ= -cloud.google.com/go/longrunning v0.5.0/go.mod h1:0JNuqRShmscVAhIACGtskSAWtqtOoPkwP0YF1oVEchc= -cloud.google.com/go/longrunning v0.5.1/go.mod h1:spvimkwdz6SPWKEt/XBij79E9fiTkHSQl/fRUUQJYJc= -cloud.google.com/go/managedidentities v1.3.0/go.mod h1:UzlW3cBOiPrzucO5qWkNkh0w33KFtBJU281hacNvsdE= -cloud.google.com/go/managedidentities v1.4.0/go.mod h1:NWSBYbEMgqmbZsLIyKvxrYbtqOsxY1ZrGM+9RgDqInM= -cloud.google.com/go/managedidentities v1.5.0/go.mod h1:+dWcZ0JlUmpuxpIDfyP5pP5y0bLdRwOS4Lp7gMni/LA= -cloud.google.com/go/managedidentities v1.6.1/go.mod h1:h/irGhTN2SkZ64F43tfGPMbHnypMbu4RB3yl8YcuEak= -cloud.google.com/go/maps v0.1.0/go.mod h1:BQM97WGyfw9FWEmQMpZ5T6cpovXXSd1cGmFma94eubI= -cloud.google.com/go/maps v0.6.0/go.mod h1:o6DAMMfb+aINHz/p/jbcY+mYeXBoZoxTfdSQ8VAJaCw= -cloud.google.com/go/maps v0.7.0/go.mod h1:3GnvVl3cqeSvgMcpRlQidXsPYuDGQ8naBis7MVzpXsY= cloud.google.com/go/mediatranslation v0.5.0/go.mod h1:jGPUhGTybqsPQn91pNXw0xVHfuJ3leR1wj37oU3y1f4= cloud.google.com/go/mediatranslation v0.6.0/go.mod h1:hHdBCTYNigsBxshbznuIMFNe5QXEowAuNmmC7h8pu5w= -cloud.google.com/go/mediatranslation v0.7.0/go.mod h1:LCnB/gZr90ONOIQLgSXagp8XUW1ODs2UmUMvcgMfI2I= -cloud.google.com/go/mediatranslation v0.8.1/go.mod h1:L/7hBdEYbYHQJhX2sldtTO5SZZ1C1vkapubj0T2aGig= 
cloud.google.com/go/memcache v1.4.0/go.mod h1:rTOfiGZtJX1AaFUrOgsMHX5kAzaTQ8azHiuDoTPzNsE= cloud.google.com/go/memcache v1.5.0/go.mod h1:dk3fCK7dVo0cUU2c36jKb4VqKPS22BTkf81Xq617aWM= -cloud.google.com/go/memcache v1.6.0/go.mod h1:XS5xB0eQZdHtTuTF9Hf8eJkKtR3pVRCcvJwtm68T3rA= -cloud.google.com/go/memcache v1.7.0/go.mod h1:ywMKfjWhNtkQTxrWxCkCFkoPjLHPW6A7WOTVI8xy3LY= -cloud.google.com/go/memcache v1.9.0/go.mod h1:8oEyzXCu+zo9RzlEaEjHl4KkgjlNDaXbCQeQWlzNFJM= -cloud.google.com/go/memcache v1.10.1/go.mod h1:47YRQIarv4I3QS5+hoETgKO40InqzLP6kpNLvyXuyaA= cloud.google.com/go/metastore v1.5.0/go.mod h1:2ZNrDcQwghfdtCwJ33nM0+GrBGlVuh8rakL3vdPY3XY= cloud.google.com/go/metastore v1.6.0/go.mod h1:6cyQTls8CWXzk45G55x57DVQ9gWg7RiH65+YgPsNh9s= -cloud.google.com/go/metastore v1.7.0/go.mod h1:s45D0B4IlsINu87/AsWiEVYbLaIMeUSoxlKKDqBGFS8= -cloud.google.com/go/metastore v1.8.0/go.mod h1:zHiMc4ZUpBiM7twCIFQmJ9JMEkDSyZS9U12uf7wHqSI= -cloud.google.com/go/metastore v1.10.0/go.mod h1:fPEnH3g4JJAk+gMRnrAnoqyv2lpUCqJPWOodSaf45Eo= -cloud.google.com/go/metastore v1.11.1/go.mod h1:uZuSo80U3Wd4zi6C22ZZliOUJ3XeM/MlYi/z5OAOWRA= -cloud.google.com/go/monitoring v1.7.0/go.mod h1:HpYse6kkGo//7p6sT0wsIC6IBDET0RhIsnmlA53dvEk= -cloud.google.com/go/monitoring v1.8.0/go.mod h1:E7PtoMJ1kQXWxPjB6mv2fhC5/15jInuulFdYYtlcvT4= -cloud.google.com/go/monitoring v1.12.0/go.mod h1:yx8Jj2fZNEkL/GYZyTLS4ZtZEZN8WtDEiEqG4kLK50w= -cloud.google.com/go/monitoring v1.13.0/go.mod h1:k2yMBAB1H9JT/QETjNkgdCGD9bPF712XiLTVr+cBrpw= -cloud.google.com/go/monitoring v1.15.1/go.mod h1:lADlSAlFdbqQuwwpaImhsJXu1QSdd3ojypXrFSMr2rM= cloud.google.com/go/networkconnectivity v1.4.0/go.mod h1:nOl7YL8odKyAOtzNX73/M5/mGZgqqMeryi6UPZTk/rA= cloud.google.com/go/networkconnectivity v1.5.0/go.mod h1:3GzqJx7uhtlM3kln0+x5wyFvuVH1pIBJjhCpjzSt75o= -cloud.google.com/go/networkconnectivity v1.6.0/go.mod h1:OJOoEXW+0LAxHh89nXd64uGG+FbQoeH8DtxCHVOMlaM= -cloud.google.com/go/networkconnectivity v1.7.0/go.mod h1:RMuSbkdbPwNMQjB5HBWD5MpTBnNm39iAVpC3TmsExt8= 
-cloud.google.com/go/networkconnectivity v1.10.0/go.mod h1:UP4O4sWXJG13AqrTdQCD9TnLGEbtNRqjuaaA7bNjF5E= -cloud.google.com/go/networkconnectivity v1.11.0/go.mod h1:iWmDD4QF16VCDLXUqvyspJjIEtBR/4zq5hwnY2X3scM= -cloud.google.com/go/networkconnectivity v1.12.1/go.mod h1:PelxSWYM7Sh9/guf8CFhi6vIqf19Ir/sbfZRUwXh92E= -cloud.google.com/go/networkmanagement v1.4.0/go.mod h1:Q9mdLLRn60AsOrPc8rs8iNV6OHXaGcDdsIQe1ohekq8= -cloud.google.com/go/networkmanagement v1.5.0/go.mod h1:ZnOeZ/evzUdUsnvRt792H0uYEnHQEMaz+REhhzJRcf4= -cloud.google.com/go/networkmanagement v1.6.0/go.mod h1:5pKPqyXjB/sgtvB5xqOemumoQNB7y95Q7S+4rjSOPYY= -cloud.google.com/go/networkmanagement v1.8.0/go.mod h1:Ho/BUGmtyEqrttTgWEe7m+8vDdK74ibQc+Be0q7Fof0= cloud.google.com/go/networksecurity v0.5.0/go.mod h1:xS6fOCoqpVC5zx15Z/MqkfDwH4+m/61A3ODiDV1xmiQ= cloud.google.com/go/networksecurity v0.6.0/go.mod h1:Q5fjhTr9WMI5mbpRYEbiexTzROf7ZbDzvzCrNl14nyU= -cloud.google.com/go/networksecurity v0.7.0/go.mod h1:mAnzoxx/8TBSyXEeESMy9OOYwo1v+gZ5eMRnsT5bC8k= -cloud.google.com/go/networksecurity v0.8.0/go.mod h1:B78DkqsxFG5zRSVuwYFRZ9Xz8IcQ5iECsNrPn74hKHU= -cloud.google.com/go/networksecurity v0.9.1/go.mod h1:MCMdxOKQ30wsBI1eI659f9kEp4wuuAueoC9AJKSPWZQ= cloud.google.com/go/notebooks v1.2.0/go.mod h1:9+wtppMfVPUeJ8fIWPOq1UnATHISkGXGqTkxeieQ6UY= cloud.google.com/go/notebooks v1.3.0/go.mod h1:bFR5lj07DtCPC7YAAJ//vHskFBxA5JzYlH68kXVdk34= -cloud.google.com/go/notebooks v1.4.0/go.mod h1:4QPMngcwmgb6uw7Po99B2xv5ufVoIQ7nOGDyL4P8AgA= -cloud.google.com/go/notebooks v1.5.0/go.mod h1:q8mwhnP9aR8Hpfnrc5iN5IBhrXUy8S2vuYs+kBJ/gu0= -cloud.google.com/go/notebooks v1.7.0/go.mod h1:PVlaDGfJgj1fl1S3dUwhFMXFgfYGhYQt2164xOMONmE= -cloud.google.com/go/notebooks v1.8.0/go.mod h1:Lq6dYKOYOWUCTvw5t2q1gp1lAp0zxAxRycayS0iJcqQ= -cloud.google.com/go/notebooks v1.9.1/go.mod h1:zqG9/gk05JrzgBt4ghLzEepPHNwE5jgPcHZRKhlC1A8= -cloud.google.com/go/optimization v1.1.0/go.mod h1:5po+wfvX5AQlPznyVEZjGJTMr4+CAkJf2XSTQOOl9l4= -cloud.google.com/go/optimization 
v1.2.0/go.mod h1:Lr7SOHdRDENsh+WXVmQhQTrzdu9ybg0NecjHidBq6xs= -cloud.google.com/go/optimization v1.3.1/go.mod h1:IvUSefKiwd1a5p0RgHDbWCIbDFgKuEdB+fPPuP0IDLI= -cloud.google.com/go/optimization v1.4.1/go.mod h1:j64vZQP7h9bO49m2rVaTVoNM0vEBEN5eKPUPbZyXOrk= -cloud.google.com/go/orchestration v1.3.0/go.mod h1:Sj5tq/JpWiB//X/q3Ngwdl5K7B7Y0KZ7bfv0wL6fqVA= -cloud.google.com/go/orchestration v1.4.0/go.mod h1:6W5NLFWs2TlniBphAViZEVhrXRSMgUGDfW7vrWKvsBk= -cloud.google.com/go/orchestration v1.6.0/go.mod h1:M62Bevp7pkxStDfFfTuCOaXgaaqRAga1yKyoMtEoWPQ= -cloud.google.com/go/orchestration v1.8.1/go.mod h1:4sluRF3wgbYVRqz7zJ1/EUNc90TTprliq9477fGobD8= -cloud.google.com/go/orgpolicy v1.4.0/go.mod h1:xrSLIV4RePWmP9P3tBl8S93lTmlAxjm06NSm2UTmKvE= -cloud.google.com/go/orgpolicy v1.5.0/go.mod h1:hZEc5q3wzwXJaKrsx5+Ewg0u1LxJ51nNFlext7Tanwc= -cloud.google.com/go/orgpolicy v1.10.0/go.mod h1:w1fo8b7rRqlXlIJbVhOMPrwVljyuW5mqssvBtU18ONc= -cloud.google.com/go/orgpolicy v1.11.0/go.mod h1:2RK748+FtVvnfuynxBzdnyu7sygtoZa1za/0ZfpOs1M= -cloud.google.com/go/orgpolicy v1.11.1/go.mod h1:8+E3jQcpZJQliP+zaFfayC2Pg5bmhuLK755wKhIIUCE= cloud.google.com/go/osconfig v1.7.0/go.mod h1:oVHeCeZELfJP7XLxcBGTMBvRO+1nQ5tFG9VQTmYS2Fs= cloud.google.com/go/osconfig v1.8.0/go.mod h1:EQqZLu5w5XA7eKizepumcvWx+m8mJUhEwiPqWiZeEdg= -cloud.google.com/go/osconfig v1.9.0/go.mod h1:Yx+IeIZJ3bdWmzbQU4fxNl8xsZ4amB+dygAwFPlvnNo= -cloud.google.com/go/osconfig v1.10.0/go.mod h1:uMhCzqC5I8zfD9zDEAfvgVhDS8oIjySWh+l4WK6GnWw= -cloud.google.com/go/osconfig v1.11.0/go.mod h1:aDICxrur2ogRd9zY5ytBLV89KEgT2MKB2L/n6x1ooPw= -cloud.google.com/go/osconfig v1.12.0/go.mod h1:8f/PaYzoS3JMVfdfTubkowZYGmAhUCjjwnjqWI7NVBc= -cloud.google.com/go/osconfig v1.12.1/go.mod h1:4CjBxND0gswz2gfYRCUoUzCm9zCABp91EeTtWXyz0tE= cloud.google.com/go/oslogin v1.4.0/go.mod h1:YdgMXWRaElXz/lDk1Na6Fh5orF7gvmJ0FGLIs9LId4E= cloud.google.com/go/oslogin v1.5.0/go.mod h1:D260Qj11W2qx/HVF29zBg+0fd6YCSjSqLUkY/qEenQU= -cloud.google.com/go/oslogin v1.6.0/go.mod 
h1:zOJ1O3+dTU8WPlGEkFSh7qeHPPSoxrcMbbK1Nm2iX70= -cloud.google.com/go/oslogin v1.7.0/go.mod h1:e04SN0xO1UNJ1M5GP0vzVBFicIe4O53FOfcixIqTyXo= -cloud.google.com/go/oslogin v1.9.0/go.mod h1:HNavntnH8nzrn8JCTT5fj18FuJLFJc4NaZJtBnQtKFs= -cloud.google.com/go/oslogin v1.10.1/go.mod h1:x692z7yAue5nE7CsSnoG0aaMbNoRJRXO4sn73R+ZqAs= cloud.google.com/go/phishingprotection v0.5.0/go.mod h1:Y3HZknsK9bc9dMi+oE8Bim0lczMU6hrX0UpADuMefr0= cloud.google.com/go/phishingprotection v0.6.0/go.mod h1:9Y3LBLgy0kDTcYET8ZH3bq/7qni15yVUoAxiFxnlSUA= -cloud.google.com/go/phishingprotection v0.7.0/go.mod h1:8qJI4QKHoda/sb/7/YmMQ2omRLSLYSu9bU0EKCNI+Lk= -cloud.google.com/go/phishingprotection v0.8.1/go.mod h1:AxonW7GovcA8qdEk13NfHq9hNx5KPtfxXNeUxTDxB6I= -cloud.google.com/go/policytroubleshooter v1.3.0/go.mod h1:qy0+VwANja+kKrjlQuOzmlvscn4RNsAc0e15GGqfMxg= -cloud.google.com/go/policytroubleshooter v1.4.0/go.mod h1:DZT4BcRw3QoO8ota9xw/LKtPa8lKeCByYeKTIf/vxdE= -cloud.google.com/go/policytroubleshooter v1.5.0/go.mod h1:Rz1WfV+1oIpPdN2VvvuboLVRsB1Hclg3CKQ53j9l8vw= -cloud.google.com/go/policytroubleshooter v1.6.0/go.mod h1:zYqaPTsmfvpjm5ULxAyD/lINQxJ0DDsnWOP/GZ7xzBc= -cloud.google.com/go/policytroubleshooter v1.7.1/go.mod h1:0NaT5v3Ag1M7U5r0GfDCpUFkWd9YqpubBWsQlhanRv0= cloud.google.com/go/privatecatalog v0.5.0/go.mod h1:XgosMUvvPyxDjAVNDYxJ7wBW8//hLDDYmnsNcMGq1K0= cloud.google.com/go/privatecatalog v0.6.0/go.mod h1:i/fbkZR0hLN29eEWiiwue8Pb+GforiEIBnV9yrRUOKI= -cloud.google.com/go/privatecatalog v0.7.0/go.mod h1:2s5ssIFO69F5csTXcwBP7NPFTZvps26xGzvQ2PQaBYg= -cloud.google.com/go/privatecatalog v0.8.0/go.mod h1:nQ6pfaegeDAq/Q5lrfCQzQLhubPiZhSaNhIgfJlnIXs= -cloud.google.com/go/privatecatalog v0.9.1/go.mod h1:0XlDXW2unJXdf9zFz968Hp35gl/bhF4twwpXZAW50JA= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod 
h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= -cloud.google.com/go/pubsub v1.5.0/go.mod h1:ZEwJccE3z93Z2HWvstpri00jOg7oO4UZDtKhwDwqF0w= -cloud.google.com/go/pubsub v1.26.0/go.mod h1:QgBH3U/jdJy/ftjPhTkyXNj543Tin1pRYcdcPRnFIRI= -cloud.google.com/go/pubsub v1.27.1/go.mod h1:hQN39ymbV9geqBnfQq6Xf63yNhUAhv9CZhzp5O6qsW0= -cloud.google.com/go/pubsub v1.28.0/go.mod h1:vuXFpwaVoIPQMGXqRyUQigu/AX1S3IWugR9xznmcXX8= -cloud.google.com/go/pubsub v1.30.0/go.mod h1:qWi1OPS0B+b5L+Sg6Gmc9zD1Y+HaM0MdUr7LsupY1P4= -cloud.google.com/go/pubsub v1.32.0/go.mod h1:f+w71I33OMyxf9VpMVcZbnG5KSUkCOUHYpFd5U1GdRc= -cloud.google.com/go/pubsublite v1.5.0/go.mod h1:xapqNQ1CuLfGi23Yda/9l4bBCKz/wC3KIJ5gKcxveZg= -cloud.google.com/go/pubsublite v1.6.0/go.mod h1:1eFCS0U11xlOuMFV/0iBqw3zP12kddMeCbj/F3FSj9k= -cloud.google.com/go/pubsublite v1.7.0/go.mod h1:8hVMwRXfDfvGm3fahVbtDbiLePT3gpoiJYJY+vxWxVM= -cloud.google.com/go/pubsublite v1.8.1/go.mod h1:fOLdU4f5xldK4RGJrBMm+J7zMWNj/k4PxwEZXy39QS0= cloud.google.com/go/recaptchaenterprise v1.3.1/go.mod h1:OdD+q+y4XGeAlxRaMn1Y7/GveP6zmq76byL6tjPE7d4= cloud.google.com/go/recaptchaenterprise/v2 v2.1.0/go.mod h1:w9yVqajwroDNTfGuhmOjPDN//rZGySaf6PtFVcSCa7o= cloud.google.com/go/recaptchaenterprise/v2 v2.2.0/go.mod h1:/Zu5jisWGeERrd5HnlS3EUGb/D335f9k51B/FVil0jk= cloud.google.com/go/recaptchaenterprise/v2 v2.3.0/go.mod h1:O9LwGCjrhGHBQET5CA7dd5NwwNQUErSgEDit1DLNTdo= -cloud.google.com/go/recaptchaenterprise/v2 v2.4.0/go.mod h1:Am3LHfOuBstrLrNCBrlI5sbwx9LBg3te2N6hGvHn2mE= -cloud.google.com/go/recaptchaenterprise/v2 v2.5.0/go.mod h1:O8LzcHXN3rz0j+LBC91jrwI3R+1ZSZEWrfL7XHgNo9U= -cloud.google.com/go/recaptchaenterprise/v2 v2.6.0/go.mod h1:RPauz9jeLtB3JVzg6nCbe12qNoaa8pXc4d/YukAmcnA= -cloud.google.com/go/recaptchaenterprise/v2 v2.7.0/go.mod h1:19wVj/fs5RtYtynAPJdDTb69oW0vNHYDBTbB4NvMD9c= -cloud.google.com/go/recaptchaenterprise/v2 v2.7.2/go.mod 
h1:kR0KjsJS7Jt1YSyWFkseQ756D45kaYNTlDPPaRAvDBU= cloud.google.com/go/recommendationengine v0.5.0/go.mod h1:E5756pJcVFeVgaQv3WNpImkFP8a+RptV6dDLGPILjvg= cloud.google.com/go/recommendationengine v0.6.0/go.mod h1:08mq2umu9oIqc7tDy8sx+MNJdLG0fUi3vaSVbztHgJ4= -cloud.google.com/go/recommendationengine v0.7.0/go.mod h1:1reUcE3GIu6MeBz/h5xZJqNLuuVjNg1lmWMPyjatzac= -cloud.google.com/go/recommendationengine v0.8.1/go.mod h1:MrZihWwtFYWDzE6Hz5nKcNz3gLizXVIDI/o3G1DLcrE= cloud.google.com/go/recommender v1.5.0/go.mod h1:jdoeiBIVrJe9gQjwd759ecLJbxCDED4A6p+mqoqDvTg= cloud.google.com/go/recommender v1.6.0/go.mod h1:+yETpm25mcoiECKh9DEScGzIRyDKpZ0cEhWGo+8bo+c= -cloud.google.com/go/recommender v1.7.0/go.mod h1:XLHs/W+T8olwlGOgfQenXBTbIseGclClff6lhFVe9Bs= -cloud.google.com/go/recommender v1.8.0/go.mod h1:PkjXrTT05BFKwxaUxQmtIlrtj0kph108r02ZZQ5FE70= -cloud.google.com/go/recommender v1.9.0/go.mod h1:PnSsnZY7q+VL1uax2JWkt/UegHssxjUVVCrX52CuEmQ= -cloud.google.com/go/recommender v1.10.1/go.mod h1:XFvrE4Suqn5Cq0Lf+mCP6oBHD/yRMA8XxP5sb7Q7gpA= cloud.google.com/go/redis v1.7.0/go.mod h1:V3x5Jq1jzUcg+UNsRvdmsfuFnit1cfe3Z/PGyq/lm4Y= cloud.google.com/go/redis v1.8.0/go.mod h1:Fm2szCDavWzBk2cDKxrkmWBqoCiL1+Ctwq7EyqBCA/A= -cloud.google.com/go/redis v1.9.0/go.mod h1:HMYQuajvb2D0LvMgZmLDZW8V5aOC/WxstZHiy4g8OiA= -cloud.google.com/go/redis v1.10.0/go.mod h1:ThJf3mMBQtW18JzGgh41/Wld6vnDDc/F/F35UolRZPM= -cloud.google.com/go/redis v1.11.0/go.mod h1:/X6eicana+BWcUda5PpwZC48o37SiFVTFSs0fWAJ7uQ= -cloud.google.com/go/redis v1.13.1/go.mod h1:VP7DGLpE91M6bcsDdMuyCm2hIpB6Vp2hI090Mfd1tcg= -cloud.google.com/go/resourcemanager v1.3.0/go.mod h1:bAtrTjZQFJkiWTPDb1WBjzvc6/kifjj4QBYuKCCoqKA= -cloud.google.com/go/resourcemanager v1.4.0/go.mod h1:MwxuzkumyTX7/a3n37gmsT3py7LIXwrShilPh3P1tR0= -cloud.google.com/go/resourcemanager v1.5.0/go.mod h1:eQoXNAiAvCf5PXxWxXjhKQoTMaUSNrEfg+6qdf/wots= -cloud.google.com/go/resourcemanager v1.6.0/go.mod h1:YcpXGRs8fDzcUl1Xw8uOVmI8JEadvhRIkoXXUNVYcVo= -cloud.google.com/go/resourcemanager 
v1.7.0/go.mod h1:HlD3m6+bwhzj9XCouqmeiGuni95NTrExfhoSrkC/3EI= -cloud.google.com/go/resourcemanager v1.9.1/go.mod h1:dVCuosgrh1tINZ/RwBufr8lULmWGOkPS8gL5gqyjdT8= -cloud.google.com/go/resourcesettings v1.3.0/go.mod h1:lzew8VfESA5DQ8gdlHwMrqZs1S9V87v3oCnKCWoOuQU= -cloud.google.com/go/resourcesettings v1.4.0/go.mod h1:ldiH9IJpcrlC3VSuCGvjR5of/ezRrOxFtpJoJo5SmXg= -cloud.google.com/go/resourcesettings v1.5.0/go.mod h1:+xJF7QSG6undsQDfsCJyqWXyBwUoJLhetkRMDRnIoXA= -cloud.google.com/go/resourcesettings v1.6.1/go.mod h1:M7mk9PIZrC5Fgsu1kZJci6mpgN8o0IUzVx3eJU3y4Jw= cloud.google.com/go/retail v1.8.0/go.mod h1:QblKS8waDmNUhghY2TI9O3JLlFk8jybHeV4BF19FrE4= cloud.google.com/go/retail v1.9.0/go.mod h1:g6jb6mKuCS1QKnH/dpu7isX253absFl6iE92nHwlBUY= -cloud.google.com/go/retail v1.10.0/go.mod h1:2gDk9HsL4HMS4oZwz6daui2/jmKvqShXKQuB2RZ+cCc= -cloud.google.com/go/retail v1.11.0/go.mod h1:MBLk1NaWPmh6iVFSz9MeKG/Psyd7TAgm6y/9L2B4x9Y= -cloud.google.com/go/retail v1.12.0/go.mod h1:UMkelN/0Z8XvKymXFbD4EhFJlYKRx1FGhQkVPU5kF14= -cloud.google.com/go/retail v1.14.1/go.mod h1:y3Wv3Vr2k54dLNIrCzenyKG8g8dhvhncT2NcNjb/6gE= -cloud.google.com/go/run v0.2.0/go.mod h1:CNtKsTA1sDcnqqIFR3Pb5Tq0usWxJJvsWOCPldRU3Do= -cloud.google.com/go/run v0.3.0/go.mod h1:TuyY1+taHxTjrD0ZFk2iAR+xyOXEA0ztb7U3UNA0zBo= -cloud.google.com/go/run v0.8.0/go.mod h1:VniEnuBwqjigv0A7ONfQUaEItaiCRVujlMqerPPiktM= -cloud.google.com/go/run v0.9.0/go.mod h1:Wwu+/vvg8Y+JUApMwEDfVfhetv30hCG4ZwDR/IXl2Qg= cloud.google.com/go/scheduler v1.4.0/go.mod h1:drcJBmxF3aqZJRhmkHQ9b3uSSpQoltBPGPxGAWROx6s= cloud.google.com/go/scheduler v1.5.0/go.mod h1:ri073ym49NW3AfT6DZi21vLZrG07GXr5p3H1KxN5QlI= -cloud.google.com/go/scheduler v1.6.0/go.mod h1:SgeKVM7MIwPn3BqtcBntpLyrIJftQISRrYB5ZtT+KOk= -cloud.google.com/go/scheduler v1.7.0/go.mod h1:jyCiBqWW956uBjjPMMuX09n3x37mtyPJegEWKxRsn44= -cloud.google.com/go/scheduler v1.8.0/go.mod h1:TCET+Y5Gp1YgHT8py4nlg2Sew8nUHMqcpousDgXJVQc= -cloud.google.com/go/scheduler v1.9.0/go.mod 
h1:yexg5t+KSmqu+njTIh3b7oYPheFtBWGcbVUYF1GGMIc= -cloud.google.com/go/scheduler v1.10.1/go.mod h1:R63Ldltd47Bs4gnhQkmNDse5w8gBRrhObZ54PxgR2Oo= cloud.google.com/go/secretmanager v1.6.0/go.mod h1:awVa/OXF6IiyaU1wQ34inzQNc4ISIDIrId8qE5QGgKA= -cloud.google.com/go/secretmanager v1.8.0/go.mod h1:hnVgi/bN5MYHd3Gt0SPuTPPp5ENina1/LxM+2W9U9J4= -cloud.google.com/go/secretmanager v1.9.0/go.mod h1:b71qH2l1yHmWQHt9LC80akm86mX8AL6X1MA01dW8ht4= -cloud.google.com/go/secretmanager v1.10.0/go.mod h1:MfnrdvKMPNra9aZtQFvBcvRU54hbPD8/HayQdlUgJpU= -cloud.google.com/go/secretmanager v1.11.1/go.mod h1:znq9JlXgTNdBeQk9TBW/FnR/W4uChEKGeqQWAJ8SXFw= cloud.google.com/go/security v1.5.0/go.mod h1:lgxGdyOKKjHL4YG3/YwIL2zLqMFCKs0UbQwgyZmfJl4= cloud.google.com/go/security v1.7.0/go.mod h1:mZklORHl6Bg7CNnnjLH//0UlAlaXqiG7Lb9PsPXLfD0= cloud.google.com/go/security v1.8.0/go.mod h1:hAQOwgmaHhztFhiQ41CjDODdWP0+AE1B3sX4OFlq+GU= -cloud.google.com/go/security v1.9.0/go.mod h1:6Ta1bO8LXI89nZnmnsZGp9lVoVWXqsVbIq/t9dzI+2Q= -cloud.google.com/go/security v1.10.0/go.mod h1:QtOMZByJVlibUT2h9afNDWRZ1G96gVywH8T5GUSb9IA= -cloud.google.com/go/security v1.12.0/go.mod h1:rV6EhrpbNHrrxqlvW0BWAIawFWq3X90SduMJdFwtLB8= -cloud.google.com/go/security v1.13.0/go.mod h1:Q1Nvxl1PAgmeW0y3HTt54JYIvUdtcpYKVfIB8AOMZ+0= -cloud.google.com/go/security v1.15.1/go.mod h1:MvTnnbsWnehoizHi09zoiZob0iCHVcL4AUBj76h9fXA= cloud.google.com/go/securitycenter v1.13.0/go.mod h1:cv5qNAqjY84FCN6Y9z28WlkKXyWsgLO832YiWwkCWcU= cloud.google.com/go/securitycenter v1.14.0/go.mod h1:gZLAhtyKv85n52XYWt6RmeBdydyxfPeTrpToDPw4Auc= -cloud.google.com/go/securitycenter v1.15.0/go.mod h1:PeKJ0t8MoFmmXLXWm41JidyzI3PJjd8sXWaVqg43WWk= -cloud.google.com/go/securitycenter v1.16.0/go.mod h1:Q9GMaLQFUD+5ZTabrbujNWLtSLZIZF7SAR0wWECrjdk= -cloud.google.com/go/securitycenter v1.18.1/go.mod h1:0/25gAzCM/9OL9vVx4ChPeM/+DlfGQJDwBy/UC8AKK0= -cloud.google.com/go/securitycenter v1.19.0/go.mod h1:LVLmSg8ZkkyaNy4u7HCIshAngSQ8EcIRREP3xBnyfag= -cloud.google.com/go/securitycenter 
v1.23.0/go.mod h1:8pwQ4n+Y9WCWM278R8W3nF65QtY172h4S8aXyI9/hsQ= -cloud.google.com/go/servicecontrol v1.4.0/go.mod h1:o0hUSJ1TXJAmi/7fLJAedOovnujSEvjKCAFNXPQ1RaU= -cloud.google.com/go/servicecontrol v1.5.0/go.mod h1:qM0CnXHhyqKVuiZnGKrIurvVImCs8gmqWsDoqe9sU1s= -cloud.google.com/go/servicecontrol v1.10.0/go.mod h1:pQvyvSRh7YzUF2efw7H87V92mxU8FnFDawMClGCNuAA= -cloud.google.com/go/servicecontrol v1.11.0/go.mod h1:kFmTzYzTUIuZs0ycVqRHNaNhgR+UMUpw9n02l/pY+mc= -cloud.google.com/go/servicecontrol v1.11.1/go.mod h1:aSnNNlwEFBY+PWGQ2DoM0JJ/QUXqV5/ZD9DOLB7SnUk= cloud.google.com/go/servicedirectory v1.4.0/go.mod h1:gH1MUaZCgtP7qQiI+F+A+OpeKF/HQWgtAddhTbhL2bs= cloud.google.com/go/servicedirectory v1.5.0/go.mod h1:QMKFL0NUySbpZJ1UZs3oFAmdvVxhhxB6eJ/Vlp73dfg= -cloud.google.com/go/servicedirectory v1.6.0/go.mod h1:pUlbnWsLH9c13yGkxCmfumWEPjsRs1RlmJ4pqiNjVL4= -cloud.google.com/go/servicedirectory v1.7.0/go.mod h1:5p/U5oyvgYGYejufvxhgwjL8UVXjkuw7q5XcG10wx1U= -cloud.google.com/go/servicedirectory v1.8.0/go.mod h1:srXodfhY1GFIPvltunswqXpVxFPpZjf8nkKQT7XcXaY= -cloud.google.com/go/servicedirectory v1.9.0/go.mod h1:29je5JjiygNYlmsGz8k6o+OZ8vd4f//bQLtvzkPPT/s= -cloud.google.com/go/servicedirectory v1.10.1/go.mod h1:Xv0YVH8s4pVOwfM/1eMTl0XJ6bzIOSLDt8f8eLaGOxQ= -cloud.google.com/go/servicemanagement v1.4.0/go.mod h1:d8t8MDbezI7Z2R1O/wu8oTggo3BI2GKYbdG4y/SJTco= -cloud.google.com/go/servicemanagement v1.5.0/go.mod h1:XGaCRe57kfqu4+lRxaFEAuqmjzF0r+gWHjWqKqBvKFo= -cloud.google.com/go/servicemanagement v1.6.0/go.mod h1:aWns7EeeCOtGEX4OvZUWCCJONRZeFKiptqKf1D0l/Jc= -cloud.google.com/go/servicemanagement v1.8.0/go.mod h1:MSS2TDlIEQD/fzsSGfCdJItQveu9NXnUniTrq/L8LK4= -cloud.google.com/go/serviceusage v1.3.0/go.mod h1:Hya1cozXM4SeSKTAgGXgj97GlqUvF5JaoXacR1JTP/E= -cloud.google.com/go/serviceusage v1.4.0/go.mod h1:SB4yxXSaYVuUBYUml6qklyONXNLt83U0Rb+CXyhjEeU= -cloud.google.com/go/serviceusage v1.5.0/go.mod h1:w8U1JvqUqwJNPEOTQjrMHkw3IaIFLoLsPLvsE3xueec= -cloud.google.com/go/serviceusage v1.6.0/go.mod 
h1:R5wwQcbOWsyuOfbP9tGdAnCAc6B9DRwPG1xtWMDeuPA= -cloud.google.com/go/shell v1.3.0/go.mod h1:VZ9HmRjZBsjLGXusm7K5Q5lzzByZmJHf1d0IWHEN5X4= -cloud.google.com/go/shell v1.4.0/go.mod h1:HDxPzZf3GkDdhExzD/gs8Grqk+dmYcEjGShZgYa9URw= -cloud.google.com/go/shell v1.6.0/go.mod h1:oHO8QACS90luWgxP3N9iZVuEiSF84zNyLytb+qE2f9A= -cloud.google.com/go/shell v1.7.1/go.mod h1:u1RaM+huXFaTojTbW4g9P5emOrrmLE69KrxqQahKn4g= -cloud.google.com/go/spanner v1.7.0/go.mod h1:sd3K2gZ9Fd0vMPLXzeCrF6fq4i63Q7aTLW/lBIfBkIk= -cloud.google.com/go/spanner v1.41.0/go.mod h1:MLYDBJR/dY4Wt7ZaMIQ7rXOTLjYrmxLE/5ve9vFfWos= -cloud.google.com/go/spanner v1.44.0/go.mod h1:G8XIgYdOK+Fbcpbs7p2fiprDw4CaZX63whnSMLVBxjk= -cloud.google.com/go/spanner v1.45.0/go.mod h1:FIws5LowYz8YAE1J8fOS7DJup8ff7xJeetWEo5REA2M= -cloud.google.com/go/spanner v1.47.0/go.mod h1:IXsJwVW2j4UKs0eYDqodab6HgGuA1bViSqW4uH9lfUI= cloud.google.com/go/speech v1.6.0/go.mod h1:79tcr4FHCimOp56lwC01xnt/WPJZc4v3gzyT7FoBkCM= cloud.google.com/go/speech v1.7.0/go.mod h1:KptqL+BAQIhMsj1kOP2la5DSEEerPDuOP/2mmkhHhZQ= -cloud.google.com/go/speech v1.8.0/go.mod h1:9bYIl1/tjsAnMgKGHKmBZzXKEkGgtU+MpdDPTE9f7y0= -cloud.google.com/go/speech v1.9.0/go.mod h1:xQ0jTcmnRFFM2RfX/U+rk6FQNUF6DQlydUSyoooSpco= -cloud.google.com/go/speech v1.14.1/go.mod h1:gEosVRPJ9waG7zqqnsHpYTOoAS4KouMRLDFMekpJ0J0= -cloud.google.com/go/speech v1.15.0/go.mod h1:y6oH7GhqCaZANH7+Oe0BhgIogsNInLlz542tg3VqeYI= -cloud.google.com/go/speech v1.17.1/go.mod h1:8rVNzU43tQvxDaGvqOhpDqgkJTFowBpDvCJ14kGlJYo= cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= @@ -633,469 +174,335 @@ cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3f cloud.google.com/go/storage v1.22.1/go.mod h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq6kuBTW58Y= cloud.google.com/go/storage 
v1.23.0/go.mod h1:vOEEDNFnciUMhBeT6hsJIn3ieU5cFRmzeLgDvXzfIXc= cloud.google.com/go/storage v1.27.0/go.mod h1:x9DOL8TK/ygDUMieqwfhdpQryTeEkhGKMi80i/iqR2s= -cloud.google.com/go/storage v1.28.1/go.mod h1:Qnisd4CqDdo6BGs2AD5LLnEsmSQ80wQ5ogcBBKhU86Y= -cloud.google.com/go/storage v1.29.0/go.mod h1:4puEjyTKnku6gfKoTfNOU/W+a9JyuVNxjpS5GBrB8h4= +cloud.google.com/go/storage v1.30.1 h1:uOdMxAs8HExqBlnLtnQyP0YkvbiDpdGShGKtx6U/oNM= cloud.google.com/go/storage v1.30.1/go.mod h1:NfxhC0UJE1aXSx7CIIbCf7y9HKT7BiccwkR7+P7gN8E= -cloud.google.com/go/storagetransfer v1.5.0/go.mod h1:dxNzUopWy7RQevYFHewchb29POFv3/AaBgnhqzqiK0w= -cloud.google.com/go/storagetransfer v1.6.0/go.mod h1:y77xm4CQV/ZhFZH75PLEXY0ROiS7Gh6pSKrM8dJyg6I= -cloud.google.com/go/storagetransfer v1.7.0/go.mod h1:8Giuj1QNb1kfLAiWM1bN6dHzfdlDAVC9rv9abHot2W4= -cloud.google.com/go/storagetransfer v1.8.0/go.mod h1:JpegsHHU1eXg7lMHkvf+KE5XDJ7EQu0GwNJbbVGanEw= -cloud.google.com/go/storagetransfer v1.10.0/go.mod h1:DM4sTlSmGiNczmV6iZyceIh2dbs+7z2Ayg6YAiQlYfA= cloud.google.com/go/talent v1.1.0/go.mod h1:Vl4pt9jiHKvOgF9KoZo6Kob9oV4lwd/ZD5Cto54zDRw= cloud.google.com/go/talent v1.2.0/go.mod h1:MoNF9bhFQbiJ6eFD3uSsg0uBALw4n4gaCaEjBw9zo8g= -cloud.google.com/go/talent v1.3.0/go.mod h1:CmcxwJ/PKfRgd1pBjQgU6W3YBwiewmUzQYH5HHmSCmM= -cloud.google.com/go/talent v1.4.0/go.mod h1:ezFtAgVuRf8jRsvyE6EwmbTK5LKciD4KVnHuDEFmOOA= -cloud.google.com/go/talent v1.5.0/go.mod h1:G+ODMj9bsasAEJkQSzO2uHQWXHHXUomArjWQQYkqK6c= -cloud.google.com/go/talent v1.6.2/go.mod h1:CbGvmKCG61mkdjcqTcLOkb2ZN1SrQI8MDyma2l7VD24= -cloud.google.com/go/texttospeech v1.4.0/go.mod h1:FX8HQHA6sEpJ7rCMSfXuzBcysDAuWusNNNvN9FELDd8= -cloud.google.com/go/texttospeech v1.5.0/go.mod h1:oKPLhR4n4ZdQqWKURdwxMy0uiTS1xU161C8W57Wkea4= -cloud.google.com/go/texttospeech v1.6.0/go.mod h1:YmwmFT8pj1aBblQOI3TfKmwibnsfvhIBzPXcW4EBovc= -cloud.google.com/go/texttospeech v1.7.1/go.mod h1:m7QfG5IXxeneGqTapXNxv2ItxP/FS0hCZBwXYqucgSk= -cloud.google.com/go/tpu v1.3.0/go.mod 
h1:aJIManG0o20tfDQlRIej44FcwGGl/cD0oiRyMKG19IQ= -cloud.google.com/go/tpu v1.4.0/go.mod h1:mjZaX8p0VBgllCzF6wcU2ovUXN9TONFLd7iz227X2Xg= -cloud.google.com/go/tpu v1.5.0/go.mod h1:8zVo1rYDFuW2l4yZVY0R0fb/v44xLh3llq7RuV61fPM= -cloud.google.com/go/tpu v1.6.1/go.mod h1:sOdcHVIgDEEOKuqUoi6Fq53MKHJAtOwtz0GuKsWSH3E= -cloud.google.com/go/trace v1.3.0/go.mod h1:FFUE83d9Ca57C+K8rDl/Ih8LwOzWIV1krKgxg6N0G28= -cloud.google.com/go/trace v1.4.0/go.mod h1:UG0v8UBqzusp+z63o7FK74SdFE+AXpCLdFb1rshXG+Y= -cloud.google.com/go/trace v1.8.0/go.mod h1:zH7vcsbAhklH8hWFig58HvxcxyQbaIqMarMg9hn5ECA= -cloud.google.com/go/trace v1.9.0/go.mod h1:lOQqpE5IaWY0Ixg7/r2SjixMuc6lfTFeO4QGM4dQWOk= -cloud.google.com/go/trace v1.10.1/go.mod h1:gbtL94KE5AJLH3y+WVpfWILmqgc6dXcqgNXdOPAQTYk= -cloud.google.com/go/translate v1.3.0/go.mod h1:gzMUwRjvOqj5i69y/LYLd8RrNQk+hOmIXTi9+nb3Djs= -cloud.google.com/go/translate v1.4.0/go.mod h1:06Dn/ppvLD6WvA5Rhdp029IX2Mi3Mn7fpMRLPvXT5Wg= -cloud.google.com/go/translate v1.5.0/go.mod h1:29YDSYveqqpA1CQFD7NQuP49xymq17RXNaUDdc0mNu0= -cloud.google.com/go/translate v1.6.0/go.mod h1:lMGRudH1pu7I3n3PETiOB2507gf3HnfLV8qlkHZEyos= -cloud.google.com/go/translate v1.7.0/go.mod h1:lMGRudH1pu7I3n3PETiOB2507gf3HnfLV8qlkHZEyos= -cloud.google.com/go/translate v1.8.1/go.mod h1:d1ZH5aaOA0CNhWeXeC8ujd4tdCFw8XoNWRljklu5RHs= -cloud.google.com/go/video v1.8.0/go.mod h1:sTzKFc0bUSByE8Yoh8X0mn8bMymItVGPfTuUBUyRgxk= -cloud.google.com/go/video v1.9.0/go.mod h1:0RhNKFRF5v92f8dQt0yhaHrEuH95m068JYOvLZYnJSw= -cloud.google.com/go/video v1.12.0/go.mod h1:MLQew95eTuaNDEGriQdcYn0dTwf9oWiA4uYebxM5kdg= -cloud.google.com/go/video v1.13.0/go.mod h1:ulzkYlYgCp15N2AokzKjy7MQ9ejuynOJdf1tR5lGthk= -cloud.google.com/go/video v1.14.0/go.mod h1:SkgaXwT+lIIAKqWAJfktHT/RbgjSuY6DobxEp0C5yTQ= -cloud.google.com/go/video v1.15.0/go.mod h1:SkgaXwT+lIIAKqWAJfktHT/RbgjSuY6DobxEp0C5yTQ= -cloud.google.com/go/video v1.17.1/go.mod h1:9qmqPqw/Ib2tLqaeHgtakU+l5TcJxCJbhFXM7UJjVzU= cloud.google.com/go/videointelligence v1.6.0/go.mod 
h1:w0DIDlVRKtwPCn/C4iwZIJdvC69yInhW0cfi+p546uU= cloud.google.com/go/videointelligence v1.7.0/go.mod h1:k8pI/1wAhjznARtVT9U1llUaFNPh7muw8QyOUpavru4= -cloud.google.com/go/videointelligence v1.8.0/go.mod h1:dIcCn4gVDdS7yte/w+koiXn5dWVplOZkE+xwG9FgK+M= -cloud.google.com/go/videointelligence v1.9.0/go.mod h1:29lVRMPDYHikk3v8EdPSaL8Ku+eMzDljjuvRs105XoU= -cloud.google.com/go/videointelligence v1.10.0/go.mod h1:LHZngX1liVtUhZvi2uNS0VQuOzNi2TkY1OakiuoUOjU= -cloud.google.com/go/videointelligence v1.11.1/go.mod h1:76xn/8InyQHarjTWsBR058SmlPCwQjgcvoW0aZykOvo= cloud.google.com/go/vision v1.2.0/go.mod h1:SmNwgObm5DpFBme2xpyOyasvBc1aPdjvMk2bBk0tKD0= cloud.google.com/go/vision/v2 v2.2.0/go.mod h1:uCdV4PpN1S0jyCyq8sIM42v2Y6zOLkZs+4R9LrGYwFo= cloud.google.com/go/vision/v2 v2.3.0/go.mod h1:UO61abBx9QRMFkNBbf1D8B1LXdS2cGiiCRx0vSpZoUo= -cloud.google.com/go/vision/v2 v2.4.0/go.mod h1:VtI579ll9RpVTrdKdkMzckdnwMyX2JILb+MhPqRbPsY= -cloud.google.com/go/vision/v2 v2.5.0/go.mod h1:MmaezXOOE+IWa+cS7OhRRLK2cNv1ZL98zhqFFZaaH2E= -cloud.google.com/go/vision/v2 v2.6.0/go.mod h1:158Hes0MvOS9Z/bDMSFpjwsUrZ5fPrdwuyyvKSGAGMY= -cloud.google.com/go/vision/v2 v2.7.0/go.mod h1:H89VysHy21avemp6xcf9b9JvZHVehWbET0uT/bcuY/0= -cloud.google.com/go/vision/v2 v2.7.2/go.mod h1:jKa8oSYBWhYiXarHPvP4USxYANYUEdEsQrloLjrSwJU= -cloud.google.com/go/vmmigration v1.2.0/go.mod h1:IRf0o7myyWFSmVR1ItrBSFLFD/rJkfDCUTO4vLlJvsE= -cloud.google.com/go/vmmigration v1.3.0/go.mod h1:oGJ6ZgGPQOFdjHuocGcLqX4lc98YQ7Ygq8YQwHh9A7g= -cloud.google.com/go/vmmigration v1.5.0/go.mod h1:E4YQ8q7/4W9gobHjQg4JJSgXXSgY21nA5r8swQV+Xxc= -cloud.google.com/go/vmmigration v1.6.0/go.mod h1:bopQ/g4z+8qXzichC7GW1w2MjbErL54rk3/C843CjfY= -cloud.google.com/go/vmmigration v1.7.1/go.mod h1:WD+5z7a/IpZ5bKK//YmT9E047AD+rjycCAvyMxGJbro= -cloud.google.com/go/vmwareengine v0.1.0/go.mod h1:RsdNEf/8UDvKllXhMz5J40XxDrNJNN4sagiox+OI208= -cloud.google.com/go/vmwareengine v0.2.2/go.mod h1:sKdctNJxb3KLZkE/6Oui94iw/xs9PRNC2wnNLXsHvH8= -cloud.google.com/go/vmwareengine 
v0.3.0/go.mod h1:wvoyMvNWdIzxMYSpH/R7y2h5h3WFkx6d+1TIsP39WGY= -cloud.google.com/go/vmwareengine v0.4.1/go.mod h1:Px64x+BvjPZwWuc4HdmVhoygcXqEkGHXoa7uyfTgSI0= -cloud.google.com/go/vpcaccess v1.4.0/go.mod h1:aQHVbTWDYUR1EbTApSVvMq1EnT57ppDmQzZ3imqIk4w= -cloud.google.com/go/vpcaccess v1.5.0/go.mod h1:drmg4HLk9NkZpGfCmZ3Tz0Bwnm2+DKqViEpeEpOq0m8= -cloud.google.com/go/vpcaccess v1.6.0/go.mod h1:wX2ILaNhe7TlVa4vC5xce1bCnqE3AeH27RV31lnmZes= -cloud.google.com/go/vpcaccess v1.7.1/go.mod h1:FogoD46/ZU+JUBX9D606X21EnxiszYi2tArQwLY4SXs= cloud.google.com/go/webrisk v1.4.0/go.mod h1:Hn8X6Zr+ziE2aNd8SliSDWpEnSS1u4R9+xXZmFiHmGE= cloud.google.com/go/webrisk v1.5.0/go.mod h1:iPG6fr52Tv7sGk0H6qUFzmL3HHZev1htXuWDEEsqMTg= -cloud.google.com/go/webrisk v1.6.0/go.mod h1:65sW9V9rOosnc9ZY7A7jsy1zoHS5W9IAXv6dGqhMQMc= -cloud.google.com/go/webrisk v1.7.0/go.mod h1:mVMHgEYH0r337nmt1JyLthzMr6YxwN1aAIEc2fTcq7A= -cloud.google.com/go/webrisk v1.8.0/go.mod h1:oJPDuamzHXgUc+b8SiHRcVInZQuybnvEW72PqTc7sSg= -cloud.google.com/go/webrisk v1.9.1/go.mod h1:4GCmXKcOa2BZcZPn6DCEvE7HypmEJcJkr4mtM+sqYPc= -cloud.google.com/go/websecurityscanner v1.3.0/go.mod h1:uImdKm2wyeXQevQJXeh8Uun/Ym1VqworNDlBXQevGMo= -cloud.google.com/go/websecurityscanner v1.4.0/go.mod h1:ebit/Fp0a+FWu5j4JOmJEV8S8CzdTkAS77oDsiSqYWQ= -cloud.google.com/go/websecurityscanner v1.5.0/go.mod h1:Y6xdCPy81yi0SQnDY1xdNTNpfY1oAgXUlcfN3B3eSng= -cloud.google.com/go/websecurityscanner v1.6.1/go.mod h1:Njgaw3rttgRHXzwCB8kgCYqv5/rGpFCsBOvPbYgszpg= cloud.google.com/go/workflows v1.6.0/go.mod h1:6t9F5h/unJz41YqfBmqSASJSXccBLtD1Vwf+KmJENM0= cloud.google.com/go/workflows v1.7.0/go.mod h1:JhSrZuVZWuiDfKEFxU0/F1PQjmpnpcoISEXH2bcHC3M= -cloud.google.com/go/workflows v1.8.0/go.mod h1:ysGhmEajwZxGn1OhGOGKsTXc5PyxOc0vfKf5Af+to4M= -cloud.google.com/go/workflows v1.9.0/go.mod h1:ZGkj1aFIOd9c8Gerkjjq7OW7I5+l6cSvT3ujaO/WwSA= -cloud.google.com/go/workflows v1.10.0/go.mod h1:fZ8LmRmZQWacon9UCX1r/g/DfAXx5VcPALq2CxzdePw= -cloud.google.com/go/workflows v1.11.1/go.mod 
h1:Z+t10G1wF7h8LgdY/EmRcQY8ptBD/nvofaL6FqlET6g= -contrib.go.opencensus.io/exporter/stackdriver v0.13.4/go.mod h1:aXENhDJ1Y4lIg4EUaVTwzvYETVNZk10Pu26tevFKLUc= +cosmossdk.io/api v0.3.1 h1:NNiOclKRR0AOlO4KIqeaG6PS6kswOMhHD0ir0SscNXE= +cosmossdk.io/api v0.3.1/go.mod h1:DfHfMkiNA2Uhy8fj0JJlOCYOBp4eWUUJ1te5zBGNyIw= +cosmossdk.io/core v0.5.1 h1:vQVtFrIYOQJDV3f7rw4pjjVqc1id4+mE0L9hHP66pyI= +cosmossdk.io/core v0.5.1/go.mod h1:KZtwHCLjcFuo0nmDc24Xy6CRNEL9Vl/MeimQ2aC7NLE= +cosmossdk.io/depinject v1.0.0-alpha.4 h1:PLNp8ZYAMPTUKyG9IK2hsbciDWqna2z1Wsl98okJopc= +cosmossdk.io/depinject v1.0.0-alpha.4/go.mod h1:HeDk7IkR5ckZ3lMGs/o91AVUc7E596vMaOmslGFM3yU= +cosmossdk.io/errors v1.0.1 h1:bzu+Kcr0kS/1DuPBtUFdWjzLqyUuCiyHjyJB6srBV/0= +cosmossdk.io/errors v1.0.1/go.mod h1:MeelVSZThMi4bEakzhhhE/CKqVv3nOJDA25bIqRDu/U= +cosmossdk.io/log v1.3.0 h1:L0Z0XstClo2kOU4h3V1iDoE5Ji64sg5HLOogzGg67Oo= +cosmossdk.io/log v1.3.0/go.mod h1:HIDyvWLqZe2ovlWabsDN4aPMpY/nUEquAhgfTf2ZzB8= +cosmossdk.io/math v1.2.0 h1:8gudhTkkD3NxOP2YyyJIYYmt6dQ55ZfJkDOaxXpy7Ig= +cosmossdk.io/math v1.2.0/go.mod h1:l2Gnda87F0su8a/7FEKJfFdJrM0JZRXQaohlgJeyQh0= +cosmossdk.io/tools/rosetta v0.2.1 h1:ddOMatOH+pbxWbrGJKRAawdBkPYLfKXutK9IETnjYxw= +cosmossdk.io/tools/rosetta v0.2.1/go.mod h1:Pqdc1FdvkNV3LcNIkYWt2RQY6IP1ge6YWZk8MhhO9Hw= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -filippo.io/edwards25519 v1.0.0-beta.2 h1:/BZRNzm8N4K4eWfK28dL4yescorxtO7YG1yun8fy+pI= -filippo.io/edwards25519 v1.0.0-beta.2/go.mod h1:X+pm78QAUPtFLi1z9PYIlS/bdDnvbCOGKtZ+ACWEf7o= -gioui.org v0.0.0-20210308172011-57750fc8a0a6/go.mod h1:RSH6KIUZ0p2xy5zHDxgAM4zumjgTw83q2ge/PI+yyw8= -git.sr.ht/~sbinet/gg v0.3.1/go.mod h1:KGYtlADtqsqANL9ueOFkWymvzUvLMQllU5Ixo+8v3pc= -github.com/Antonboom/errname v0.1.4/go.mod h1:jRXo3m0E0EuCnK3wbsSVH3X55Z4iTDLl6ZfCxwFj4TM= -github.com/Azure/azure-pipeline-go v0.2.1/go.mod h1:UGSo8XybXnIGZ3epmeBw7Jdz+HiUVpqIlpz/HKHylF4= -github.com/Azure/azure-pipeline-go 
v0.2.2/go.mod h1:4rQ/NZncSvGqNkkOsNpOU1tgoNuIlp9AfUH5G1tvCHc= -github.com/Azure/azure-storage-blob-go v0.7.0/go.mod h1:f9YQKtsG1nMisotuTPpO0tjNuEjKRYAcJU8/ydDI++4= -github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8= +filippo.io/edwards25519 v1.0.0 h1:0wAIcmJUqRdI8IJ/3eGi5/HwXZWPujYXXlkrQogz0Ek= +filippo.io/edwards25519 v1.0.0/go.mod h1:N1IkdkCkiLB6tki+MYJoSx2JTY9NUlxZE7eHn5EwJns= +github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 h1:/vQbFIOMbk2FiG/kXiLl8BRyzTWDw7gX/Hz7Dd5eDMs= +github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4/go.mod h1:hN7oaIRCjzsZ2dE+yG5k+rsdt3qcwykqK6HVGcKwsw4= github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= -github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI= -github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0= -github.com/Azure/go-autorest/autorest/adal v0.8.0/go.mod h1:Z6vX6WXXuyieHAXwMj0S6HY6e6wcHn37qQMBQlvY3lc= -github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA= -github.com/Azure/go-autorest/autorest/date v0.2.0/go.mod h1:vcORJHLJEh643/Ioh9+vPmf1Ij9AEBM5FuBIXLmIy0g= -github.com/Azure/go-autorest/autorest/mocks v0.1.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0= -github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0= -github.com/Azure/go-autorest/autorest/mocks v0.3.0/go.mod h1:a8FDP3DYzQ4RYfVAxAN3SVSiiO77gL2j2ronKKP0syM= -github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc= -github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk= github.com/BurntSushi/toml 
v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/toml v0.4.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/ChainSafe/go-schnorrkel v0.0.0-20200405005733-88cbf1b4c40d h1:nalkkPQcITbvhmL4+C4cKA87NW0tfm3Kl9VXRoPywFg= -github.com/ChainSafe/go-schnorrkel v0.0.0-20200405005733-88cbf1b4c40d/go.mod h1:URdX5+vg25ts3aCh8H5IFZybJYKWhJHYMTnf+ULtoC4= +github.com/ChainSafe/go-schnorrkel v1.0.0 h1:3aDA67lAykLaG1y3AOjs88dMxC88PgUuHRrLeDnvGIM= +github.com/ChainSafe/go-schnorrkel v1.0.0/go.mod h1:dpzHYVxLZcp8pjlV+O+UR8K0Hp/z7vcchBSbMBEhCw4= github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= -github.com/DataDog/zstd v1.4.1/go.mod h1:1jcaCB/ufaK+sKp1NBhlGmpz41jOoPQ35bpF36t7BBo= -github.com/DataDog/zstd v1.4.5 h1:EndNeuB0l9syBZhut0wns3gV1hL8zX8LIu6ZiVHWLIQ= -github.com/DataDog/zstd v1.4.5/go.mod h1:1jcaCB/ufaK+sKp1NBhlGmpz41jOoPQ35bpF36t7BBo= -github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs= -github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c/go.mod h1:X0CRv0ky0k6m906ixxpzmDRLvX58TFUKS2eePweuyxk= github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= -github.com/Masterminds/goutils v1.1.0/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= -github.com/Masterminds/semver v1.4.2/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= -github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= -github.com/Masterminds/sprig v2.15.0+incompatible/go.mod h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o= -github.com/Masterminds/sprig v2.22.0+incompatible/go.mod h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o= -github.com/Microsoft/go-winio v0.4.14/go.mod 
h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA= -github.com/Microsoft/go-winio v0.5.0 h1:Elr9Wn+sGKPlkaBvwu4mTrxtmOp3F3yV9qhaHbXGjwU= -github.com/Microsoft/go-winio v0.5.0/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= +github.com/Microsoft/go-winio v0.6.0 h1:slsWYD/zyx7lCXoZVlvQrj0hPTM1HI4+v1sIda2yDvg= +github.com/Microsoft/go-winio v0.6.0/go.mod h1:cTAf44im0RAYeL23bpB+fzCyDH2MJiz2BO69KH/soAE= github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 h1:TngWCqHvy9oXAN6lEVMRuU21PR1EtLVZJmdB18Gu3Rw= -github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod h1:lmUJ/7eu/Q8D7ML55dXQrVaamCz2vxCfdQBasLZfHKk= github.com/OneOfOne/xxhash v1.2.2 h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= -github.com/OpenPeeDeeP/depguard v1.0.1/go.mod h1:xsIw86fROiiwelg+jB2uM9PiKihMMmUx/1V+TNhjQvM= github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo= github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= -github.com/StackExchange/wmi v0.0.0-20180116203802-5d049714c4a6/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg= -github.com/StackExchange/wmi v1.2.1/go.mod h1:rcmrprowKIVzvc+NUiLncP2uuArMWLCbu9SBzvHz7e8= -github.com/VictoriaMetrics/fastcache v1.5.7/go.mod h1:ptDBkNMQI4RtmVo8VS/XwRY6RoTu1dAWCbrk+6WsEM8= github.com/VividCortex/gohistogram v1.0.0 h1:6+hBz+qvs0JOrrNhhmR7lFxo5sINxBCGXrdtl/UvroE= github.com/VividCortex/gohistogram v1.0.0/go.mod h1:Pf5mBqqDxYaXu3hDrrU+w6nw50o/4+TcAqDqk/vUH7g= -github.com/Workiva/go-datastructures v1.0.52 h1:PLSK6pwn8mYdaoaCZEMsXBpBotr4HHn9abU0yMQt0NI= -github.com/Workiva/go-datastructures v1.0.52/go.mod h1:Z+F2Rca0qCsVYDS8z7bAGm8f3UkzuWYS/oBZz5a7VVA= -github.com/Zilliqa/gozilliqa-sdk v1.2.1-0.20201201074141-dd0ecada1be6/go.mod h1:eSYp2T6f0apnuW8TzhV3f6Aff2SE8Dwio++U4ha4yEM= -github.com/adlio/schema v1.1.13 h1:LeNMVg5Z1FX+Qgz8tJUijBLRdcpbFUElz+d1489On98= 
-github.com/adlio/schema v1.1.13/go.mod h1:L5Z7tw+7lRK1Fnpi/LT/ooCP1elkXn0krMWBQHUhEDE= -github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII= +github.com/adlio/schema v1.3.3 h1:oBJn8I02PyTB466pZO1UZEn1TV5XLlifBSyMrmHl/1I= github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c= -github.com/ajstarks/deck v0.0.0-20200831202436-30c9fc6549a9/go.mod h1:JynElWSGnm/4RlzPXRlREEwqTHAN3T56Bv2ITsFT3gY= -github.com/ajstarks/deck/generate v0.0.0-20210309230005-c3f852c02e19/go.mod h1:T13YZdzov6OU0A1+RfKZiZN9ca6VeKdBdyDV+BY97Tk= -github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw= -github.com/ajstarks/svgo v0.0.0-20211024235047-1546f124cd8b/go.mod h1:1KcenG0jGWcpt8ov532z81sp/kMMUG485J2InIOyADM= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= -github.com/alexkohler/prealloc v1.0.0/go.mod h1:VetnK3dIgFBBKmg0YnD9F9x6Icjd+9cvfHR56wJVlKE= -github.com/allegro/bigcache v1.2.1-0.20190218064605-e24eb225f156/go.mod h1:Cb/ax3seSYIx7SuZdm2G2xzfwmv3TPSk2ucNfQESPXM= -github.com/andybalholm/brotli v1.0.0/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y= -github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= -github.com/antihax/optional v0.0.0-20180407024304-ca021399b1a6/go.mod h1:V8iCPQYkqmusNa815XgQio277wI47sdRh1dUOLdyC6Q= github.com/antihax/optional 
v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= -github.com/aokoli/goutils v1.0.1/go.mod h1:SijmP0QR8LtwsmDs8Yii5Z/S4trXFGFC2oO5g9DP+DQ= -github.com/apache/arrow/go/v10 v10.0.1/go.mod h1:YvhnlEePVnBS4+0z3fhPfUy7W1Ikj0Ih0vcRo/gZ1M0= -github.com/apache/arrow/go/v11 v11.0.0/go.mod h1:Eg5OsL5H+e299f7u5ssuXsuHQVEGC4xei5aX110hRiI= -github.com/apache/arrow/go/v12 v12.0.0/go.mod h1:d+tV/eHZZ7Dz7RPrFKtPK02tpr+c9/PEd/zm8mDS9Vg= github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= -github.com/apache/thrift v0.16.0/go.mod h1:PHK3hniurgQaNMZYaCLEqXKsYK8upmhPbmdP2FXSqgU= -github.com/aristanetworks/goarista v0.0.0-20170210015632-ea17b1a17847/go.mod h1:D/tb0zPVXnP7fmsLZjtdUhSsumbK/ij54UXjjVgMGxQ= github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= -github.com/armon/go-metrics v0.3.9 h1:O2sNqxBdvq8Eq5xmzljcYzAORli6RWCvEym4cJf9m18= -github.com/armon/go-metrics v0.3.9/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc= +github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA= +github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4= github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6lCRdSC2Tm3DSWRPvIPr6xNKyeHdqDQSQT+A= -github.com/ashanbrown/forbidigo v1.2.0/go.mod h1:vVW7PEdqEFqapJe95xHkTfB1+XvZXBFg8t0sG2FIxmI= -github.com/ashanbrown/makezero v0.0.0-20210520155254-b6261585ddde/go.mod h1:oG9Dnez7/ESBqc4EdrdNlryeo7d0KcW1ftXHm7nU/UU= 
github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQwij/eHl5CU= -github.com/aws/aws-sdk-go v1.23.20/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= -github.com/aws/aws-sdk-go v1.25.37/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= -github.com/aws/aws-sdk-go v1.25.48/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/aws/aws-sdk-go v1.27.0/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= -github.com/aws/aws-sdk-go v1.36.30/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro= +github.com/aws/aws-sdk-go v1.44.122/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo= +github.com/aws/aws-sdk-go v1.44.203 h1:pcsP805b9acL3wUqa4JR2vg1k2wnItkDYNvfmcy6F+U= +github.com/aws/aws-sdk-go v1.44.203/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= github.com/aws/aws-sdk-go-v2 v0.18.0/go.mod h1:JWVYvqSMppoMJC0x5wdwiImzgXTI9FuZwxzkQq9wy+g= +github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT8A= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= -github.com/bgentry/speakeasy v0.1.0 h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY= +github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d h1:xDfNPAt8lFiC1UJrqV3uuy861HCTo708pDMbjHHdCas= +github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d/go.mod h1:6QX/PXZ00z/TKoufEY6K/a0k6AhaJrQKdFe6OfVXsa4= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= -github.com/bits-and-blooms/bitset v1.2.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA= -github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84= 
-github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM= -github.com/bkielbasa/cyclop v1.2.0/go.mod h1:qOI0yy6A7dYC4Zgsa72Ppm9kONl0RoIlPbzot9mhmeI= -github.com/bombsimon/wsl/v3 v3.3.0/go.mod h1:st10JtZYLE4D5sC7b8xV4zTKZwAQjCH/Hy2Pm1FNZIc= -github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= -github.com/boombuler/barcode v1.0.1/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= -github.com/btcsuite/btcd v0.0.0-20171128150713-2e60448ffcc6/go.mod h1:Dmm/EzmjnCiweXmzRIAiUWCInVmPgjkzgv5k4tVyXiQ= -github.com/btcsuite/btcd v0.0.0-20190115013929-ed77733ec07d/go.mod h1:d3C0AkH6BRcvO8T0UEPu53cnw4IbV63x1bEjildYhO0= -github.com/btcsuite/btcd v0.0.0-20190315201642-aa6e0f35703c/go.mod h1:DrZx5ec/dmnfpw9KyYoQyYo7d0KEvTkk/5M/vbZjAr8= -github.com/btcsuite/btcd v0.20.1-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ= -github.com/btcsuite/btcd v0.21.0-beta/go.mod h1:ZSWyehm27aAuS9bvkATT+Xte3hjHZ+MRgMY/8NJ7K94= -github.com/btcsuite/btcd v0.22.0-beta h1:LTDpDKUM5EeOFBPM8IXpinEcmZ6FWfNZbE3lfrfdnWo= -github.com/btcsuite/btcd v0.22.0-beta/go.mod h1:9n5ntfhhHQBIhUvlhDvD3Qg6fRUj4jkN0VB8L8svzOA= -github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f/go.mod h1:TdznJufoqS23FtqVCzL0ZqgP5MqXbb4fg/WgDys70nA= -github.com/btcsuite/btcutil v0.0.0-20180706230648-ab6388e0c60a/go.mod h1:+5NJ2+qvTyV9exUAL/rxXi3DcLg2Ts+ymUAY5y4NvMg= -github.com/btcsuite/btcutil v0.0.0-20190207003914-4c204d697803/go.mod h1:+5NJ2+qvTyV9exUAL/rxXi3DcLg2Ts+ymUAY5y4NvMg= -github.com/btcsuite/btcutil v0.0.0-20190425235716-9e5f4b9a998d/go.mod h1:+5NJ2+qvTyV9exUAL/rxXi3DcLg2Ts+ymUAY5y4NvMg= -github.com/btcsuite/btcutil v1.0.2/go.mod h1:j9HUFwoQRsZL3V4n+qG+CUnEGHOarIxfC3Le2Yhbcts= -github.com/btcsuite/btcutil v1.0.3-0.20201208143702-a53e38424cce h1:YtWJF7RHm2pYCvA5t0RPmAaLUhREsKuKd+SLhxFbFeQ= -github.com/btcsuite/btcutil v1.0.3-0.20201208143702-a53e38424cce/go.mod h1:0DVlHczLPewLcPGEIeUEzfOJhqGPQ0mJJRDBtD307+o= 
-github.com/btcsuite/go-socks v0.0.0-20170105172521-4720035b7bfd/go.mod h1:HHNXQzUsZCxOoE+CPiyCTO6x34Zs86zZUiwtpXoGdtg= -github.com/btcsuite/goleveldb v0.0.0-20160330041536-7834afc9e8cd/go.mod h1:F+uVaaLLH7j4eDXPRvw78tMflu7Ie2bzYOH4Y8rRKBY= -github.com/btcsuite/goleveldb v1.0.0/go.mod h1:QiK9vBlgftBg6rWQIj6wFzbPfRjiykIEhBH4obrXJ/I= -github.com/btcsuite/snappy-go v0.0.0-20151229074030-0bdef8d06723/go.mod h1:8woku9dyThutzjeg+3xrA5iCpBRH8XEEg3lh6TiUghc= -github.com/btcsuite/snappy-go v1.0.0/go.mod h1:8woku9dyThutzjeg+3xrA5iCpBRH8XEEg3lh6TiUghc= -github.com/btcsuite/websocket v0.0.0-20150119174127-31079b680792/go.mod h1:ghJtEyQwv5/p4Mg4C0fgbePVuGr935/5ddU9Z3TmDRY= -github.com/btcsuite/winsvc v1.0.0/go.mod h1:jsenWakMcC0zFBFurPLEAyrnc/teJEM1O46fmI40EZs= +github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816 h1:41iFGWnSlI2gVpmOtVTJZNodLdLQLn/KsJqFvXwnd/s= +github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/btcsuite/btcd/btcec/v2 v2.3.2 h1:5n0X6hX0Zk+6omWcihdYvdAlGf2DfasC0GMf7DClJ3U= +github.com/btcsuite/btcd/btcec/v2 v2.3.2/go.mod h1:zYzJ8etWJQIv1Ogk7OzpWjowwOdXY1W/17j2MW85J04= +github.com/btcsuite/btcd/btcutil v1.1.2 h1:XLMbX8JQEiwMcYft2EGi8zPUkoa0abKIU6/BJSRsjzQ= +github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1 h1:q0rUy8C/TYNBQS1+CGKw68tLOFYSNEs0TFnxxnS9+4U= +github.com/bufbuild/buf v1.7.0 h1:uWRjhIXcrWkzIkA5TqXGyJbF51VW54QJsQZ3nwaes5Q= +github.com/bufbuild/buf v1.7.0/go.mod h1:Go40fMAF46PnPLC7jJgTQhAI95pmC0+VtxFKVC0qLq0= +github.com/bufbuild/connect-go v1.0.0 h1:htSflKUT8y1jxhoPhPYTZMrsY3ipUXjjrbcZR5O2cVo= +github.com/bufbuild/connect-go v1.0.0/go.mod h1:9iNvh/NOsfhNBUH5CtvXeVUskQO1xsrEviH7ZArwZ3I= +github.com/bufbuild/protocompile v0.4.0 h1:LbFKd2XowZvQ/kajzguUp2DC9UEIQhIq77fZZlaQsNA= +github.com/bufbuild/protocompile v0.4.0/go.mod h1:3v93+mbWn/v3xzN+31nwkJfrEpAUwp+BagBSZWx+TP8= +github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM= 
+github.com/bytedance/sonic v1.8.0 h1:ea0Xadu+sHlu7x5O3gKhRpQ1IKiMrSiHttPF0ybECuA= +github.com/bytedance/sonic v1.8.0/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZXU064P/U= github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n9yuLkIJQ= github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= +github.com/cenkalti/backoff/v4 v4.1.1/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= +github.com/cenkalti/backoff/v4 v4.1.3 h1:cFAlzYUlVYDysBEH2T5hyJZMh3+5+WCBvSnK6Q8UtC4= +github.com/cenkalti/backoff/v4 v4.1.3/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/census-instrumentation/opencensus-proto v0.4.1/go.mod h1:4T9NM4+4Vw91VeyqjLS6ao50K5bOcLKN6Q42XnYaRYw= -github.com/cespare/cp v0.1.0/go.mod h1:SOGHArjBr4JWaSDEVpWpo/hNg6RoKrls6Oh40hiwW+s= github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/charithe/durationcheck v0.0.8/go.mod h1:SSbRIBVfMjCi/kEB6K65XEA83D6prSM8ap1UCpNKtgg= -github.com/chavacava/garif v0.0.0-20210405164556-e8a0a408d6af/go.mod h1:Qjyv4H3//PWVzTeCezG2b9IRn6myJxJSr4TD/xo6ojU= -github.com/checkpoint-restore/go-criu/v5 v5.0.0/go.mod h1:cfwC0EG7HMUenopBsUf9d89JlCLQIfgVcNsNN0t6T2M= +github.com/cheggaaa/pb v1.0.27/go.mod 
h1:pQciLPpbU0oxA0h+VJYYLxO+XeDQb5pZijXscXHm81s= +github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY= +github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 h1:qSGYFH7+jGhDF8vLC+iwCD4WpbV1EBDSzWkJODFLams= +github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/logex v1.2.1 h1:XHDu3E6q+gdHgsdTPH6ImJMIp436vR6MPtH8gP05QzM= +github.com/chzyer/logex v1.2.1/go.mod h1:JLbx6lG2kDbNRFnfkgvh4eRJRPX1QCoOIWomwysCBrQ= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/readline v1.5.1 h1:upd/6fQk4src78LMRzh5vItIt361/o4uq553V8B5sGI= +github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObkaSkeBlk= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= -github.com/cilium/ebpf v0.6.2/go.mod h1:4tRaxcgiL706VnOzHOdBlY8IEAIdxINsQBcU4xJJXRs= +github.com/chzyer/test v1.0.0 h1:p3BQDXSxOhOG0P9z6/hGnII4LGiEPOYBhs8asl/fC04= +github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38GC8= github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I= github.com/clbanning/x2j v0.0.0-20191024224557-825249438eec/go.mod h1:jMjuTZXRI4dUb/I5gc9Hdhagfvm9+RyrPryS/auMzxE= -github.com/cloudflare/cloudflare-go v0.10.2-0.20190916151808-a80f83b9add9/go.mod h1:1MxXX1Ux4x6mqPmjkUgTP1CdXIBXKX7T+Jk9Gxrmx+U= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= 
+github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= -github.com/cncf/udpa/go v0.0.0-20220112060539-c52dc94e7fbe/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= +github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20220314180256-7f1daf1720fc/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20230105202645-06c439db220b/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20230310173818-32f1caf87195/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4 h1:/inchEIKaYC1Akx+H+gqO04wryn5h75LSazbRlnya1k= +github.com/cockroachdb/apd/v2 v2.0.2 h1:weh8u7Cneje73dDh+2tEVLUvyBc89iwepWCD8b8034E= github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= +github.com/cockroachdb/errors v1.10.0 h1:lfxS8zZz1+OjtV4MtNWgboi/W5tyLEB6VQZBXN+0VUU= +github.com/cockroachdb/errors v1.10.0/go.mod h1:lknhIsEVQ9Ss/qKDBQS/UqFSvPQjOwNq2qyKAxtHRqE= +github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b h1:r6VH0faHjZeQy818SGhaone5OnYfxFR/+AzdY3sf5aE= 
+github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b/go.mod h1:Vz9DsVWQQhf3vs21MhPMZpMGSht7O/2vFW2xusFUVOs= +github.com/cockroachdb/redact v1.1.5 h1:u1PMllDkdFfPWaNGMyLD1+so+aq3uUItthCFqzwPJ30= +github.com/cockroachdb/redact v1.1.5/go.mod h1:BVNblN9mBWFyMyqK1k3AAiSxhvhfK2oOZZ2lK+dpvRg= github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI= -github.com/coinbase/rosetta-sdk-go v0.6.10 h1:rgHD/nHjxLh0lMEdfGDqpTtlvtSBwULqrrZ2qPdNaCM= -github.com/coinbase/rosetta-sdk-go v0.6.10/go.mod h1:J/JFMsfcePrjJZkwQFLh+hJErkAmdm9Iyy3D5Y0LfXo= -github.com/confio/ics23/go v0.6.6 h1:pkOy18YxxJ/r0XFDCnrl4Bjv6h4LkBSpLS6F38mrKL8= -github.com/confio/ics23/go v0.6.6/go.mod h1:E45NqnlpxGnpfTWL/xauN7MRwEE28T4Dd4uraToOaKg= -github.com/containerd/console v1.0.2/go.mod h1:ytZPjGgY2oeTkAONYafi2kSj0aYggsf8acV1PGKCbzQ= -github.com/containerd/continuity v0.0.0-20190827140505-75bee3e2ccb6 h1:NmTXa/uVnDyp0TY5MKi197+3HWcnYWfnHGyaFthlnGw= -github.com/containerd/continuity v0.0.0-20190827140505-75bee3e2ccb6/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y= -github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= +github.com/coinbase/rosetta-sdk-go/types v1.0.0 h1:jpVIwLcPoOeCR6o1tU+Xv7r5bMONNbHU7MuEHboiFuA= +github.com/coinbase/rosetta-sdk-go/types v1.0.0/go.mod h1:eq7W2TMRH22GTW0N0beDnN931DW0/WOI1R2sdHNHG4c= +github.com/cometbft/cometbft-db v0.8.0 h1:vUMDaH3ApkX8m0KZvOFFy9b5DZHBAjsnEuo9AKVZpjo= +github.com/cometbft/cometbft-db v0.8.0/go.mod h1:6ASCP4pfhmrCBpfk01/9E1SI29nD3HfVHrY4PG8x5c0= +github.com/confio/ics23/go v0.9.0 h1:cWs+wdbS2KRPZezoaaj+qBleXgUk5WOQFMP3CQFGTr4= +github.com/confio/ics23/go v0.9.0/go.mod h1:4LPZ2NYqnYIVRklaozjNR1FScgDJ2s5Xrp+e/mYVRak= +github.com/containerd/containerd v1.6.8 h1:h4dOFDwzHmqFEP754PgfgTeVXFnLiRc6kiqC7tplDJs= +github.com/containerd/containerd v1.6.8/go.mod h1:By6p5KqPK0/7/CgO/A6t/Gz+CUYUu2zf1hUaaymVXB0= +github.com/containerd/continuity v0.3.0 
h1:nisirsYROK15TAMVukJOUyGJjz4BNQJBVsNvAXZJ/eg= +github.com/containerd/typeurl v1.0.2 h1:Chlt8zIieDbzQFzXzAeBEF92KhExuE4p9p92/QmY7aY= +github.com/containerd/typeurl v1.0.2/go.mod h1:9trJWW2sRlGub4wZJRTW83VtbOLS6hwcDZXTn6oPz9s= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= -github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= -github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd v0.0.0-20180511133405-39ca1b05acc7/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= -github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= -github.com/coreos/go-systemd v0.0.0-20190620071333-e64a0ec8b42a/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= -github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= -github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= -github.com/cosmos/btcutil v1.0.4 h1:n7C2ngKXo7UC9gNyMNLbzqz7Asuf+7Qv4gnX/rOdQ44= -github.com/cosmos/btcutil v1.0.4/go.mod h1:Ffqc8Hn6TJUdDgHBwIZLtrLQC1KdJ9jGJl/TvgUaxbU= -github.com/cosmos/cosmos-proto v0.0.0-20210914142853-23ed61ac79ce h1:nin7WtIMETZ8LezEYa5e9/iqyEgQka1x0cQYqgUeTGM= -github.com/cosmos/cosmos-proto v0.0.0-20210914142853-23ed61ac79ce/go.mod h1:g2Q3cd94kOBVRAv7ahdtO27yUc4cpNuHGnI40qanl1k= -github.com/cosmos/cosmos-sdk v0.44.2/go.mod h1:fwQJdw+aECatpTvQTo1tSfHEsxACdZYU80QCZUPnHr4= 
-github.com/cosmos/cosmos-sdk v0.44.3/go.mod h1:bA3+VenaR/l/vDiYzaiwbWvRPWHMBX2jG0ygiFtiBp0= -github.com/cosmos/cosmos-sdk v0.44.5 h1:t5h+KPzZb0Zsag1RP1DCMQlyJyIQqJcqSPJrbUCDGHY= -github.com/cosmos/cosmos-sdk v0.44.5/go.mod h1:maUA6m2TBxOJZkbwl0eRtEBgTX37kcaiOWU5t1HEGaY= +github.com/cosmos/btcutil v1.0.5 h1:t+ZFcX77LpKtDBhjucvnOH8C2l2ioGsBNEQ3jef8xFk= +github.com/cosmos/btcutil v1.0.5/go.mod h1:IyB7iuqZMJlthe2tkIFL33xPyzbFYP0XVdS8P5lUPis= +github.com/cosmos/cosmos-proto v1.0.0-beta.2 h1:X3OKvWgK9Gsejo0F1qs5l8Qn6xJV/AzgIWR2wZ8Nua8= +github.com/cosmos/cosmos-proto v1.0.0-beta.2/go.mod h1:+XRCLJ14pr5HFEHIUcn51IKXD1Fy3rkEQqt4WqmN4V0= +github.com/cosmos/cosmos-sdk v0.47.8 h1:kzYF2xhnfi8dy15t2VVS24tc2KcuU4JBgjh9yCFx4y4= +github.com/cosmos/cosmos-sdk v0.47.8/go.mod h1:VTAtthIsmfplanhFfUTfT6ED4F+kkJxT7nmvmKXRthI= github.com/cosmos/go-bip39 v0.0.0-20180819234021-555e2067c45d/go.mod h1:tSxLoYXyBmiFeKpvmq4dzayMdCjCnu8uqmCysIGBT2Y= github.com/cosmos/go-bip39 v1.0.0 h1:pcomnQdrdH22njcAatO0yWojsUnCO3y2tNoV1cb6hHY= github.com/cosmos/go-bip39 v1.0.0/go.mod h1:RNJv0H/pOIVgxw6KS7QeX2a0Uo0aKUlfhZ4xuwvCdJw= -github.com/cosmos/iavl v0.17.1/go.mod h1:7aisPZK8yCpQdy3PMvKeO+bhq1NwDjUwjzxwwROUxFk= -github.com/cosmos/iavl v0.17.3 h1:s2N819a2olOmiauVa0WAhoIJq9EhSXE9HDBAoR9k+8Y= -github.com/cosmos/iavl v0.17.3/go.mod h1:prJoErZFABYZGDHka1R6Oay4z9PrNeFFiMKHDAMOi4w= -github.com/cosmos/ibc-go v1.2.2/go.mod h1:XmYjsRFOs6Q9Cz+CSsX21icNoH27vQKb3squgnCOCbs= -github.com/cosmos/keyring v1.1.7-0.20210622111912-ef00f8ac3d76 h1:DdzS1m6o/pCqeZ8VOAit/gyATedRgjvkVI+UCrLpyuU= -github.com/cosmos/keyring v1.1.7-0.20210622111912-ef00f8ac3d76/go.mod h1:0mkLWIoZuQ7uBoospo5Q9zIpqq6rYCPJDSUdeCJvPM8= -github.com/cosmos/ledger-cosmos-go v0.11.1 h1:9JIYsGnXP613pb2vPjFeMMjBI5lEDsEaF6oYorTy6J4= -github.com/cosmos/ledger-cosmos-go v0.11.1/go.mod h1:J8//BsAGTo3OC/vDLjMRFLW6q0WAaXvHnVc7ZmE8iUY= -github.com/cosmos/ledger-go v0.9.2 h1:Nnao/dLwaVTk1Q5U9THldpUMMXU94BOTWPddSmVB6pI= -github.com/cosmos/ledger-go v0.9.2/go.mod 
h1:oZJ2hHAZROdlHiwTg4t7kP+GKIIkBT+o6c9QWFanOyI= +github.com/cosmos/gogogateway v1.2.0 h1:Ae/OivNhp8DqBi/sh2A8a1D0y638GpL3tkmLQAiKxTE= +github.com/cosmos/gogogateway v1.2.0/go.mod h1:iQpLkGWxYcnCdz5iAdLcRBSw3h7NXeOkZ4GUkT+tbFI= +github.com/cosmos/gogoproto v1.4.2/go.mod h1:cLxOsn1ljAHSV527CHOtaIP91kK6cCrZETRBrkzItWU= +github.com/cosmos/gogoproto v1.4.10 h1:QH/yT8X+c0F4ZDacDv3z+xE3WU1P1Z3wQoLMBRJoKuI= +github.com/cosmos/gogoproto v1.4.10/go.mod h1:3aAZzeRWpAwr+SS/LLkICX2/kDFyaYVzckBDzygIxek= +github.com/cosmos/iavl v0.20.1 h1:rM1kqeG3/HBT85vsZdoSNsehciqUQPWrR4BYmqE2+zg= +github.com/cosmos/iavl v0.20.1/go.mod h1:WO7FyvaZJoH65+HFOsDir7xU9FWk2w9cHXNW1XHcl7A= +github.com/cosmos/keyring v1.2.0 h1:8C1lBP9xhImmIabyXW4c3vFjjLiBdGCmfLUfeZlV1Yo= +github.com/cosmos/keyring v1.2.0/go.mod h1:fc+wB5KTk9wQ9sDx0kFXB3A0MaeGHM9AwRStKOQ5vOA= +github.com/cosmos/ledger-cosmos-go v0.12.4 h1:drvWt+GJP7Aiw550yeb3ON/zsrgW0jgh5saFCr7pDnw= +github.com/cosmos/ledger-cosmos-go v0.12.4/go.mod h1:fjfVWRf++Xkygt9wzCsjEBdjcf7wiiY35fv3ctT+k4M= +github.com/cosmos/rosetta-sdk-go v0.10.0 h1:E5RhTruuoA7KTIXUcMicL76cffyeoyvNybzUGSKFTcM= +github.com/cosmos/rosetta-sdk-go v0.10.0/go.mod h1:SImAZkb96YbwvoRkzSMQB6noNJXFgWl/ENIznEoYQI4= github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= -github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w= +github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/creachadair/taskgroup v0.3.2 h1:zlfutDS+5XG40AOxcHDSThxKzns8Tnr9jnr6VqkYlkM= +github.com/creachadair/taskgroup v0.3.2/go.mod h1:wieWwecHVzsidg2CsUnFinW1faVN4+kq+TDlRJQ0Wbk= github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= github.com/creack/pty 
v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= -github.com/cyphar/filepath-securejoin v0.2.2/go.mod h1:FpkQEhXnPnOthhzymB7CGsFk2G9VLXONKD9G7QGMM+4= -github.com/daixiang0/gci v0.2.9/go.mod h1:+4dZ7TISfSmqfAGv59ePaHfNzgGtIkHAhhdKggP1JAc= -github.com/danieljoos/wincred v1.0.2 h1:zf4bhty2iLuwgjgpraD2E9UbvO+fe54XXGJbOwe23fU= -github.com/danieljoos/wincred v1.0.2/go.mod h1:SnuYRW9lp1oJrZX/dXJqr0cPK5gYXqx3EJbmjhLdK9U= -github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/creack/pty v1.1.11 h1:07n33Z8lZxZ2qwegKbObQohDhXDQxiMMz1NOUGYlesw= +github.com/creack/pty v1.1.11/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/danieljoos/wincred v1.1.2 h1:QLdCxFs1/Yl4zduvBdcHB8goaYk9RARS2SgLLRuAyr0= +github.com/danieljoos/wincred v1.1.2/go.mod h1:GijpziifJoIBfYh+S7BbkdUTU4LfM+QnGqR5Vl2tAx0= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/deckarep/golang-set v0.0.0-20180603214616-504e848d77ea/go.mod h1:93vsz/8Wt4joVM7c2AVqh+YRMiUSc14yDtF28KmMOgQ= -github.com/decred/dcrd/lru v1.0.0/go.mod h1:mxKOwFd7lFjN2GZYsiz/ecgqR6kkYAl+0pz0tEMk218= -github.com/denis-tingajkin/go-header v0.4.2/go.mod h1:eLRHAVXzE5atsKAnNRDB90WHCFFnBUn4RN0nRcs1LJA= +github.com/decred/dcrd/crypto/blake256 v1.0.0 h1:/8DMNYp9SGi5f0w7uCm6d6M4OU2rGFK09Y2A4Xv7EE0= +github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0 h1:HbphB4TFFXpv7MNrT52FGrrgVXF1owhMVTHFZIlnvd4= +github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0/go.mod h1:DZGJHZMqrU4JJqFAWUS2UO1+lbSKsdiOoYi9Zzey7Fc= github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f h1:U5y3Y5UE0w7amNe7Z5G/twsBW0KEalRQXZzf8ufSh9I= 
github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f/go.mod h1:xH/i4TFMt8koVQZ6WFms69WAsDWr2XsYL3Hkl7jkoLE= -github.com/dgraph-io/badger/v2 v2.2007.2 h1:EjjK0KqwaFMlPin1ajhP943VPENHJdEz1KLIegjaI3k= -github.com/dgraph-io/badger/v2 v2.2007.2/go.mod h1:26P/7fbL4kUZVEVKLAKXkBXKOydDmM2p1e+NhhnBCAE= +github.com/dgraph-io/badger/v2 v2.2007.4 h1:TRWBQg8UrlUhaFdco01nO2uXwzKS7zd+HVdwV/GHc4o= +github.com/dgraph-io/badger/v2 v2.2007.4/go.mod h1:vSw/ax2qojzbN6eXHIx6KPKtCSHJN/Uz0X0VPruTIhk= github.com/dgraph-io/ristretto v0.0.3-0.20200630154024-f66de99634de/go.mod h1:KPxhHT9ZxKefz+PCeOGsrHpl1qZ7i70dGTu2u+Ahh6E= -github.com/dgraph-io/ristretto v0.0.3 h1:jh22xisGBjrEVnRZ1DVTpBVQm0Xndu8sMl0CWDzSIBI= -github.com/dgraph-io/ristretto v0.0.3/go.mod h1:KPxhHT9ZxKefz+PCeOGsrHpl1qZ7i70dGTu2u+Ahh6E= +github.com/dgraph-io/ristretto v0.1.1 h1:6CWw5tJNgpegArSHpNHJKldNeq03FQCwYvfMVWajOK8= +github.com/dgraph-io/ristretto v0.1.1/go.mod h1:S1GPSBCYCIhmVNfcth17y2zZtQT6wzkzgwUve0VDWWA= github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 h1:fAjc9m62+UWV/WAFKLNi6ZS0675eEUC9y3AlwSbQu1Y= github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= -github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= -github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= -github.com/docker/docker v1.4.2-0.20180625184442-8e610b2b55bf/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/distribution v2.8.1+incompatible h1:Q50tZOPR6T/hjNsyc9g8/syEs6bk8XXApsHjKukMl68= +github.com/docker/distribution v2.8.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/docker 
v20.10.19+incompatible h1:lzEmjivyNHFHMNAFLXORMBXyGIhw/UP4DvJwvyKYq64= +github.com/docker/docker v20.10.19+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ= github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= -github.com/docker/go-units v0.4.0 h1:3uh0PgVws3nIA0Q+MwDC8yjEPf9zjRfZZWXZYDct3Tw= -github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= -github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= -github.com/dop251/goja v0.0.0-20200721192441-a695b0cdd498/go.mod h1:Mw6PkjjMXWbTj+nnj4s3QPXq1jaT0s5pC0iFD4+BOAA= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= -github.com/dvsekhvalnov/jose2go v0.0.0-20200901110807-248326c1351b h1:HBah4D48ypg3J7Np4N+HY/ZR76fx3HEUGxDU6Uk39oQ= -github.com/dvsekhvalnov/jose2go v0.0.0-20200901110807-248326c1351b/go.mod h1:7BvyPhdbLxMXIYTFPLsyJRFMsKmOZnQmzh6Gb+uquuM= -github.com/dvyukov/go-fuzz v0.0.0-20200318091601-be3528f3a813/go.mod h1:11Gm+ccJnvAhCNLlf5+cS9KjtbaD5I5zaZpFMsTHWTw= +github.com/dvsekhvalnov/jose2go v1.5.0 h1:3j8ya4Z4kMCwT5nXIKFSV84YS+HdqSSO0VsTQxaLAeM= +github.com/dvsekhvalnov/jose2go v1.5.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU= github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= github.com/eapache/go-xerial-snappy 
v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU= github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I= -github.com/edsrzf/mmap-go v0.0.0-20160512033002-935e0e8a636c/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M= github.com/edsrzf/mmap-go v1.0.0/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M= -github.com/enigmampc/btcutil v1.0.3-0.20200723161021-e2fb6adb2a25/go.mod h1:hTr8+TLQmkUkgcuh3mcr5fjrT9c64ZzsBCdCEC6UppY= +github.com/envoyproxy/go-control-plane v0.6.9/go.mod h1:SBwIajubJHhxtWwsL9s8ss4safvEdbitLhGGK48rN6g= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= +github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= -github.com/envoyproxy/go-control-plane v0.10.3/go.mod h1:fJJn/j26vwOu972OllsvAgJJM//w9BV6Fxbg2LuVd34= -github.com/envoyproxy/go-control-plane v0.11.0/go.mod h1:VnHyVMpzcLvCFt9yUz1UnCwHLhwx1WguiVDV7pTG/tI= -github.com/envoyproxy/protoc-gen-validate v0.0.14/go.mod 
h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/envoyproxy/protoc-gen-validate v0.6.7/go.mod h1:dyJXwwfPK2VSqiB9Klm1J6romD608Ba7Hij42vrOBCo= -github.com/envoyproxy/protoc-gen-validate v0.9.1/go.mod h1:OKNgG7TCp5pF4d6XftA0++PMirau2/yoOwVac3AbF2w= -github.com/envoyproxy/protoc-gen-validate v0.10.0/go.mod h1:DRjgyB0I43LtJapqN6NiRwroiAU2PaFuvk/vjgh61ss= -github.com/esimonov/ifshort v1.0.2/go.mod h1:yZqNJUrNn20K8Q9n2CrjTKYyVEmX209Hgu+M1LBpeZE= -github.com/ethereum/go-ethereum v1.9.25/go.mod h1:vMkFiYLHI4tgPw4k2j4MHKoovchFE8plZ0M9VMk4/oM= -github.com/ettle/strcase v0.1.1/go.mod h1:hzDLsPC7/lwKyBOywSHEP89nt2pDgdy+No1NBA9o9VY= -github.com/facebookgo/ensure v0.0.0-20160127193407-b4ab57deab51 h1:0JZ+dUmQeA8IIVUMzysrX4/AKuQwWhV2dYQuPZdvdSQ= -github.com/facebookgo/ensure v0.0.0-20160127193407-b4ab57deab51/go.mod h1:Yg+htXGokKKdzcwhuNDwVvN+uBxDGXJ7G/VN1d8fa64= -github.com/facebookgo/stack v0.0.0-20160209184415-751773369052 h1:JWuenKqqX8nojtoVVWjGfOF9635RETekkoH6Cc9SX0A= -github.com/facebookgo/stack v0.0.0-20160209184415-751773369052/go.mod h1:UbMTZqLaRiH3MsBH8va0n7s1pQYcu3uTb8G4tygF4Zg= -github.com/facebookgo/subset v0.0.0-20150612182917-8dac2c3c4870 h1:E2s37DuLxFhQDg5gKsWoLBOB0n+ZW8s599zru8FJ2/Y= -github.com/facebookgo/subset v0.0.0-20150612182917-8dac2c3c4870/go.mod h1:5tD+neXqOorC30/tWg0LCSkrqj/AR6gu8yY8/fpw1q0= -github.com/fatih/color v1.3.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/envoyproxy/protoc-gen-validate v1.0.2 h1:QkIBuU5k+x7/QXPvPPnWXWlCdaBFApVqftFV6k087DA= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= -github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= -github.com/fatih/color v1.12.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= -github.com/fatih/structtag 
v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94= -github.com/felixge/httpsnoop v1.0.1 h1:lvB5Jl89CsZtGIWuTcDM1E/vkVs49/Ml7JJe07l8SPQ= github.com/felixge/httpsnoop v1.0.1/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -github.com/fjl/memsize v0.0.0-20180418122429-ca190fb6ffbc/go.mod h1:VvhXpOYNQvB+uIk2RvXzuaQtkQJzzIx6lSBe1xv7hi0= -github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= -github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= +github.com/felixge/httpsnoop v1.0.2 h1:+nS9g82KMXccJ/wp0zyRW9ZBHFETmMGtkk+2CTTrW4o= +github.com/felixge/httpsnoop v1.0.2/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= -github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4= github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8/go.mod h1:ZhphrRTfi2rbfLwlschooIH4+wKKDR4Pdxhh+TRoA20= -github.com/frankban/quicktest v1.11.3/go.mod h1:wRf/ReqHper53s+kmmSZizM8NamnL3IM0I9ntUbOk+k= +github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= -github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= -github.com/fullstorydev/grpcurl v1.6.0/go.mod h1:ZQ+ayqbKMJNhzLmbpCiurTVlaK2M/3nqZCxaQ2Ze/sM= -github.com/fzipp/gocyclo v0.3.1/go.mod h1:DJHO6AUmbdqj2ET4Z9iArSuwWgYDRryYt2wASxc7x3E= -github.com/gballet/go-libpcsclite v0.0.0-20190607065134-2772fd86a8ff/go.mod h1:x7DCsMOv1taUwEWCzT4cmDeAkigA5/QCwUodaVOe8Ww= +github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= 
+github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= +github.com/getsentry/sentry-go v0.23.0 h1:dn+QRCeJv4pPt9OjVXiMcGIBIefaTJPw/h0bZWO05nE= +github.com/getsentry/sentry-go v0.23.0/go.mod h1:lc76E2QywIyW8WuBnwl8Lc4bkmQH4+w1gwTf25trprY= +github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= -github.com/gin-gonic/gin v1.6.3 h1:ahKqKTFpO5KTPHxWZjEdPScmYaGtLo8Y4DMHoEsnp14= -github.com/gin-gonic/gin v1.6.3/go.mod h1:75u5sXoLsGZoRN5Sgbi1eraJ4GU3++wFwWzhwvtwp4M= -github.com/go-critic/go-critic v0.5.6/go.mod h1:cVjj0DfqewQVIlIAGexPCaGaZDAqGE29PYDDADIVNEo= -github.com/go-fonts/dejavu v0.1.0/go.mod h1:4Wt4I4OU2Nq9asgDCteaAaWZOV24E+0/Pwo0gppep4g= -github.com/go-fonts/latin-modern v0.2.0/go.mod h1:rQVLdDMK+mK1xscDwsqM5J8U2jrRa3T0ecnM9pNujks= -github.com/go-fonts/liberation v0.1.1/go.mod h1:K6qoJYypsmfVjWg8KOVDQhLc8UDgIK2HYqyqAO9z7GY= -github.com/go-fonts/liberation v0.2.0/go.mod h1:K6qoJYypsmfVjWg8KOVDQhLc8UDgIK2HYqyqAO9z7GY= -github.com/go-fonts/stix v0.1.0/go.mod h1:w/c1f0ldAUlJmLBvlbkvVXLAD+tAMqobIIQpmnUIzUY= +github.com/gin-gonic/gin v1.9.0 h1:OjyFBKICoexlu99ctXNR2gg+c5pKrKMuyjgARg9qeY8= +github.com/gin-gonic/gin v1.9.0/go.mod h1:W1Me9+hsUSyj3CePGrd1/QrKJMSJ1Tu/0hFEH89961k= +github.com/go-chi/chi/v5 v5.0.7 h1:rDTPXLDHGATaeHvVlLcR4Qe0zftYethFucbjVQ1PxU8= +github.com/go-chi/chi/v5 v5.0.7/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= +github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= 
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-kit/kit v0.10.0 h1:dXFJfIHVvUcpSgDOV+Ne6t7jXri8Tfv2uOLHUZ2XNuo= github.com/go-kit/kit v0.10.0/go.mod h1:xUsJbQ/Fp4kEt7AFgCuvyX4a71u8h9jB8tj/ORgOZ7o= -github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= -github.com/go-latex/latex v0.0.0-20210118124228-b3d85cf34e07/go.mod h1:CO1AlKB2CSIqUrmQPqA0gdRIlnLEY0gK5JGjh37zN5U= -github.com/go-latex/latex v0.0.0-20210823091927-c0d11ff05a81/go.mod h1:SX0U8uGpxhq9o2S/CELCSUxEWWAuoCUcVCQWv7G2OCk= +github.com/go-kit/kit v0.12.0 h1:e4o3o3IsBfAKQh5Qbbiqyfu97Ku7jrO/JbohvztANh4= +github.com/go-kit/kit v0.12.0/go.mod h1:lHd+EkCZPIwYItmGDDRdhinkzX2A1sj+M9biaEaizzs= +github.com/go-kit/log v0.2.1 h1:MRVx0/zhvdseW+Gza6N9rVzU/IVzaeE1SFI4raAhmBU= +github.com/go-kit/log v0.2.1/go.mod h1:NwTd00d/i8cPZ3xOwwiv2PO5MOcx78fFErGNcVmBjv0= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= -github.com/go-logfmt/logfmt v0.5.0 h1:TrB8swr/68K7m9CcGut2g3UOihhbcbiMAYiuTXdEih4= github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= -github.com/go-ole/go-ole v1.2.1/go.mod h1:7FAglXiTm7HKlQRDeOQ6ZNUHidzCWXuZWq/1dTyBNF8= -github.com/go-ole/go-ole v1.2.5/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= -github.com/go-pdf/fpdf v0.5.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M= -github.com/go-pdf/fpdf v0.6.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M= -github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A= -github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= 
-github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= -github.com/go-playground/locales v0.14.0 h1:u50s323jtVGugKlcYeyzC0etD1HifMjqmJqb8WugfUU= -github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= -github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= -github.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/jYrnRPArHwAcmLoJZxyho= -github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA= -github.com/go-playground/validator/v10 v10.2.0/go.mod h1:uOYAAleCW8F/7oMFd6aG0GOhaH6EGOAJShg8Id5JGkI= -github.com/go-playground/validator/v10 v10.9.0 h1:NgTtmN58D0m8+UuxtYmGztBJB7VnPgjj221I1QHci2A= -github.com/go-playground/validator/v10 v10.9.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos= -github.com/go-redis/redis v6.15.8+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA= -github.com/go-sourcemap/sourcemap v2.1.2+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg= +github.com/go-logfmt/logfmt v0.5.1 h1:otpy5pqBCBZ1ng9RQ0dPu4PN7ba75Y/aA+UpowDyNVA= +github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.2.4 h1:g01GSCwiDw2xSZfjJ2/T9M+S6pFdcNtFYsp+Y43HYDQ= +github.com/go-logr/logr v1.2.4/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= 
+github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.11.2 h1:q3SHpufmypg+erIExEKUmsgmhDTyhcJ38oeKGACXohU= +github.com/go-playground/validator/v10 v10.11.2/go.mod h1:NieE624vt4SCTJtD87arVLvdmjPAeV8BQlHtMnw9D7s= github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= -github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= -github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= -github.com/go-toolsmith/astcast v1.0.0/go.mod h1:mt2OdQTeAQcY4DQgPSArJjHCcOwlX+Wl/kwN+LbLGQ4= -github.com/go-toolsmith/astcopy v1.0.0/go.mod h1:vrgyG+5Bxrnz4MZWPF+pI4R8h3qKRjjyvV/DSez4WVQ= -github.com/go-toolsmith/astequal v1.0.0/go.mod h1:H+xSiq0+LtiDC11+h1G32h7Of5O3CYFJ99GVbS5lDKY= -github.com/go-toolsmith/astfmt v1.0.0/go.mod h1:cnWmsOAuq4jJY6Ct5YWlVLmcmLMn1JUPuQIHCY7CJDw= -github.com/go-toolsmith/astinfo v0.0.0-20180906194353-9809ff7efb21/go.mod h1:dDStQCHtmZpYOmjRP/8gHHnCCch3Zz3oEgCdZVdtweU= -github.com/go-toolsmith/astp v1.0.0/go.mod h1:RSyrtpVlfTFGDYRbrjyWP1pYu//tSFcvdYrA8meBmLI= -github.com/go-toolsmith/pkgload v1.0.0/go.mod h1:5eFArkbO80v7Z0kdngIxsRXRMTaX4Ilcwuh3clNrQJc= -github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8= -github.com/go-toolsmith/typep v1.0.0/go.mod h1:JSQCQMUPdRlMZFswiq3TGpNp1GMktqkR2Ns5AIQkATU= -github.com/go-toolsmith/typep v1.0.2/go.mod h1:JSQCQMUPdRlMZFswiq3TGpNp1GMktqkR2Ns5AIQkATU= -github.com/go-xmlfmt/xmlfmt 
v0.0.0-20191208150333-d5b6f63a941b/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM= -github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee h1:s+21KNqlpePfkah2I+gwHF8xmJWRjooY+5248k6m4A0= github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= github.com/gobwas/pool v0.2.0 h1:QEmUOlnSjWtnpRGHF3SauEiOsy82Cup83Vf2LcMlnc8= github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= github.com/gobwas/ws v1.0.2 h1:CoAavW/wd/kulfZmSIBt6p24n4j7tHgNVCjsfHVNUbo= github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM= -github.com/goccy/go-json v0.9.11/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/goccy/go-json v0.10.0 h1:mXKd9Qw4NuzShiRlOXKews24ufknHO7gx30lsDyokKA= +github.com/goccy/go-json v0.10.0/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 h1:ZpnhV/YsD2/4cESfV5+Hoeu/iUR3ruzNvZ+yQfO03a0= github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= -github.com/gogo/gateway v1.1.0 h1:u0SuhL9+Il+UbjM9VIE3ntfRujKbvVpFvNB4HbjeVQ0= -github.com/gogo/gateway v1.1.0/go.mod h1:S7rR8FRQyG3QFESeSv4l2WnsyzlCLG0CzBbUUo/mbic= -github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k= +github.com/gofrs/uuid v4.3.0+incompatible h1:CaSVZxm5B+7o45rtab4jC2G37WGYX1zQfuU2i6DSvnc= +github.com/gofrs/uuid v4.3.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gogo/googleapis v1.1.0/go.mod 
h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s= +github.com/gogo/googleapis v1.4.1-0.20201022092350-68b0159b7869/go.mod h1:5YRNX2z1oM5gXdAkurHa942MDgEJyk02w4OecKY87+c= +github.com/gogo/googleapis v1.4.1 h1:1Yx4Myt7BxzvUr5ldGSbwYiZG6t9wGBZ+8/fX3Wvtq0= +github.com/gogo/googleapis v1.4.1/go.mod h1:2lpHqI5OcWCtVElxXnPt+s8oJvMpySlOyM6xDCrzib4= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= +github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/glog v1.0.0/go.mod h1:EWib/APOK0SL3dFbYqvxE3UYd8E6s1ouQ7iEp/0LWV4= +github.com/golang/glog v1.1.2 h1:DVjP2PbBOzHyzA+dn3WhHIq4NdVu3Q+pvivFICf/7fo= +github.com/golang/glog v1.1.2/go.mod h1:zR+okUeTbrL6EL3xHUDxZuEtGv04p5shwip1+mL/rLQ= github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= github.com/golang/groupcache 
v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= @@ -1105,7 +512,6 @@ github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71 github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= -github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.0/go.mod h1:Qd/q+1AKNOZr9uGQzbzCmRO6sUih6GTPZv6a1/R87v0= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -1127,27 +533,13 @@ github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiu github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/golang/snappy v0.0.3-0.20201103224600-674baa8c7fc3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= 
-github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2/go.mod h1:k9Qvh+8juN+UKMCS/3jFtGICgW8O96FVaZsaxdzDkR4= -github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a/go.mod h1:ryS0uhF+x9jgbj/N71xsEqODy9BN81/GonCZiOzirOk= -github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613/go.mod h1:SyvUF2NxV+sN8upjjeVYr5W7tyxaT1JVtvhKhOn2ii8= -github.com/golangci/gofmt v0.0.0-20190930125516-244bba706f1a/go.mod h1:9qCChq59u/eW8im404Q2WWTrnBUQKjpNYKMbU4M7EFU= -github.com/golangci/golangci-lint v1.42.1/go.mod h1:MuInrVlgg2jq4do6XI1jbkErbVHVbwdrLLtGv6p2wPI= -github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0/go.mod h1:66R6K6P6VWk9I95jvqGxkqJxVWGFy9XlDwLwVz1RCFg= -github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca/go.mod h1:tvlJhZqDe4LMs4ZHD0oMUlt9G2LWuDGoisJTBzLMV9o= -github.com/golangci/misspell v0.3.5/go.mod h1:dEbvlSfYbMQDtrpRMQU675gSDLDNa8sCPPChZ7PhiVA= -github.com/golangci/revgrep v0.0.0-20210208091834-cd28932614b5/go.mod h1:LK+zW4MpyytAWQRz0M4xnzEk50lSvqDQKfx304apFkY= -github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4/go.mod h1:Izgrg8RkN3rCIMLGE9CyYmU9pY2Jer6DgANEnZ/L/cQ= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/btree v1.0.0 h1:0udJVsspx3VBr5FwtLhQQtuAsVc79tTq0ocGIPAU6qo= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/certificate-transparency-go v1.0.21/go.mod h1:QeJfpSbVSfYc7RgB3gJFj9cbuQMMchQxrWXz8Ruopmg= -github.com/google/certificate-transparency-go v1.1.1/go.mod h1:FDKqPvSXawb2ecErVRrD+nfy23RCzyl7eqVCEmlT1Zs= -github.com/google/flatbuffers v2.0.8+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= +github.com/google/btree v1.1.2 h1:xf4v41cLI2Z6FxbKm+8Bu+m8ifhj15JuZ9sa0jZCMUU= +github.com/google/btree v1.1.2/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= 
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= @@ -1162,18 +554,18 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/gofuzz v0.0.0-20170612174753-24818f796faf/go.mod h1:HP5RmnzzSNb993RKQDq4+1A4ia9nllfqcQFTQJedwGI= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/gofuzz v1.1.1-0.20200604201612-c04b05f3adfa/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= -github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= -github.com/google/martian/v3 v3.3.2/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= +github.com/google/martian/v3 v3.3.2 
h1:IqNFLAmvJOgVlpdEBiQbDc2EwKW77amAycfTuWKdfvw= github.com/google/orderedcode v0.0.1 h1:UzfcAexk9Vhv8+9pNOgRu41f16lHq725vPwnSeiG/Us= github.com/google/orderedcode v0.0.1/go.mod h1:iVyU4/qPKHY5h/wSd6rZZCDcLJNxiWO6dvsYES2Sb20= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= @@ -1182,7 +574,6 @@ github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hf github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200507031123-427632fa3b1c/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= @@ -1193,21 +584,18 @@ github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= -github.com/google/s2a-go v0.1.0/go.mod h1:OJpEgntRZo8ugHpF9hkoLJbS5dSI20XZeXJ9JVywLlM= -github.com/google/s2a-go v0.1.3/go.mod h1:Ej+mSEMGRnqRzjc7VtF+jdBwYG5fuJfiZ8ELkjEwM0A= -github.com/google/s2a-go v0.1.4/go.mod h1:Ej+mSEMGRnqRzjc7VtF+jdBwYG5fuJfiZ8ELkjEwM0A= -github.com/google/trillian v1.3.11/go.mod 
h1:0tPraVHrSDkA3BO6vKX67zgLXs6SsOAbHEivX+9mPgw= -github.com/google/uuid v0.0.0-20161128191214-064e2069ce9c/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o= +github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.4.0 h1:MtMxsa51/r9yyhkyLsVeVt0B+BGQZzpQiTQ4eHZ8bc4= +github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/enterprise-certificate-proxy v0.0.0-20220520183353-fd19c99a87aa/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= github.com/googleapis/enterprise-certificate-proxy v0.1.0/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= github.com/googleapis/enterprise-certificate-proxy v0.2.0/go.mod h1:8C0jb7/mgJe/9KK8Lm7X9ctZC2t60YyIpYEI16jx0Qg= -github.com/googleapis/enterprise-certificate-proxy v0.2.1/go.mod h1:AwSRAtLfXpU5Nm3pW+v7rGDHp09LsPtGY9MduiEsR9k= -github.com/googleapis/enterprise-certificate-proxy v0.2.3/go.mod h1:AwSRAtLfXpU5Nm3pW+v7rGDHp09LsPtGY9MduiEsR9k= +github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs= +github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.1.0/go.mod 
h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= @@ -1217,19 +605,11 @@ github.com/googleapis/gax-go/v2 v2.3.0/go.mod h1:b8LNqSzNabLiUpXKkY7HAR5jr6bIT99 github.com/googleapis/gax-go/v2 v2.4.0/go.mod h1:XOTVJ59hdnfJLIP/dh8n5CGryZR2LxK9wbMD5+iXC6c= github.com/googleapis/gax-go/v2 v2.5.1/go.mod h1:h6B0KMMFNtI2ddbGJn3T3ZbwkeT6yqEF02fYlzkUCyo= github.com/googleapis/gax-go/v2 v2.6.0/go.mod h1:1mjbznJAPHFpesgE5ucqfYEscaz5kMdcIDwU/6+DDoY= -github.com/googleapis/gax-go/v2 v2.7.0/go.mod h1:TEop28CZZQ2y+c0VxMUmu1lV+fQx57QpBWsYpwqHJx8= -github.com/googleapis/gax-go/v2 v2.7.1/go.mod h1:4orTrqY6hXxxaUL4LHIPl6lGo8vAE38/qKbhSAKP6QI= -github.com/googleapis/gax-go/v2 v2.8.0/go.mod h1:4orTrqY6hXxxaUL4LHIPl6lGo8vAE38/qKbhSAKP6QI= -github.com/googleapis/gax-go/v2 v2.10.0/go.mod h1:4UOEnMCrxsSqQ940WnTiD6qJ63le2ev3xfyagutxiPw= -github.com/googleapis/gax-go/v2 v2.11.0/go.mod h1:DxmR61SGKkGLa2xigwuZIQpkCI2S5iydzRfb3peWZJI= +github.com/googleapis/gax-go/v2 v2.12.0 h1:A+gCJKdRfqXkr+BIRGtZLibNXf0m1f9E4HG56etFpas= +github.com/googleapis/gax-go/v2 v2.12.0/go.mod h1:y+aIqrI5eb1YGMVJfuV3185Ts/D7qKpsEkdD5+I6QGU= github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+cLsWGBF62rFAi7WjWO4= github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= -github.com/gookit/color v1.4.2/go.mod h1:fqRyamkC1W8uxl+lxCQxOT09l/vYfZ+QeiX3rKQHCoQ= -github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= -github.com/gordonklaus/ineffassign v0.0.0-20200309095847-7953dde2c7bf/go.mod h1:cuNKsD1zp2v6XfE/orVX2QE1LC+i254ceGcVeDT3pTU= -github.com/gordonklaus/ineffassign v0.0.0-20210225214923-2e10b2664254/go.mod h1:M9mZEtGIsR1oDaZagNPNG9iq9n2HrhZ17dsXk73V3Lw= -github.com/gorhill/cronexpr v0.0.0-20180427100037-88b0669f7d75/go.mod 
h1:g2644b03hfBX9Ov0ZBDgXXens4rxSxmqFBbhvKv2yVA= github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= github.com/gorilla/handlers v1.5.1 h1:9lRY6j8DEeeBT10CvO9hGW0gmky0BprnvDI5vfhUHH4= github.com/gorilla/handlers v1.5.1/go.mod h1:t8XrUpc4KVXb7HGyJ4/cEnwQiaxrX/hz1Zv/4g96P1Q= @@ -1238,35 +618,19 @@ github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2z github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= -github.com/gorilla/websocket v1.4.1-0.20190629185528-ae1634f6a989/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= -github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc= -github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= -github.com/gostaticanalysis/analysisutil v0.0.0-20190318220348-4088753ea4d3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE= -github.com/gostaticanalysis/analysisutil v0.0.3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE= -github.com/gostaticanalysis/analysisutil v0.1.0/go.mod h1:dMhHRU9KTiDcuLGdy87/2gTR8WruwYZrKdRq9m1O6uw= -github.com/gostaticanalysis/analysisutil v0.4.1/go.mod h1:18U/DLpRgIUd459wGxVHE0fRgmo1UgHDcbw7F5idXu0= -github.com/gostaticanalysis/comment v1.3.0/go.mod h1:xMicKDx7XRXYdVwY9f9wQpDJVnqWxw9wCauCMKp+IBI= -github.com/gostaticanalysis/comment v1.4.1/go.mod h1:ih6ZxzTHLdadaiSnF5WY3dxUoXfXAlTaRzuaNDlSado= -github.com/gostaticanalysis/forcetypeassert v0.0.0-20200621232751-01d4955beaa5/go.mod h1:qZEedyP/sY1lTGV1uJ3VhWZ2mqag3IkWsDHVbplHXak= -github.com/gostaticanalysis/nilerr v0.1.1/go.mod h1:wZYb6YI5YAxxq0i1+VJbY0s2YONW0HU0GPE3+5PWN4A= 
-github.com/gotestyourself/gotestyourself v2.2.0+incompatible/go.mod h1:zZKM6oeNM8k+FRljX1mnzVYeS8wiGgQyvST1/GafPbY= -github.com/graph-gophers/graphql-go v0.0.0-20191115155744-f33e81362277/go.mod h1:9CQHMSxwO4MprSdzoIEobiHpoLtHm77vfxsvsIN5Vuc= -github.com/gregjones/httpcache v0.0.0-20190611155906-901d90724c79/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= -github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= +github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= +github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-middleware v1.2.2/go.mod h1:EaizFBKfUKtMIF5iaDEhniwNedqGo9FuLFzppDr3uwI= github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaWhq2GjuNUt0aUU0YBYw= github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= -github.com/grpc-ecosystem/grpc-gateway v1.8.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= -github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= -github.com/grpc-ecosystem/grpc-gateway v1.12.1/go.mod h1:8XEsbTttt/W+VvjtQhLACqCisSPWTxCZ7sBRjU6iH9c= github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0/go.mod h1:hgWBS7lorOAVIJEQMi4ZsPv9hVvWI6+ch50m39Pf2Ks= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.3/go.mod 
h1:o//XUCC/F+yRGJoPO/VU0GSB0f8Nhgmxx0VIRUvaC0w= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.0 h1:Wqo399gCIufwto+VfwCSvsnfGpF/w5E9CNxSwbpD6No= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.0/go.mod h1:qmOFXW2epJhM0qSnUUYpldc7gVz2KMQwJ/QYCDIa7XU= github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c h1:6rhixN/i8ZofjG1Y75iExal34USq5p+wiN1tpie8IrU= github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c/go.mod h1:NMPJylDgVpX0MLRlPy15sqSwOFv/U1GZ2m21JhFfek0= github.com/gtank/merlin v0.1.1-0.20191105220539-8318aed1a79f/go.mod h1:T86dnYJhcGOh5BjZFCJWTDeTK7XW8uE+E21Cy/bIQ+s= @@ -1274,120 +638,105 @@ github.com/gtank/merlin v0.1.1 h1:eQ90iG7K9pOhtereWsmyRJ6RAwcP4tHTDBHXNg+u5is= github.com/gtank/merlin v0.1.1/go.mod h1:T86dnYJhcGOh5BjZFCJWTDeTK7XW8uE+E21Cy/bIQ+s= github.com/gtank/ristretto255 v0.1.2 h1:JEqUCPA1NvLq5DwYtuzigd7ss8fwbYay9fi4/5uMzcc= github.com/gtank/ristretto255 v0.1.2/go.mod h1:Ph5OpO6c7xKUGROZfWVLiJf9icMDwUeIvY4OmlYW69o= -github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= github.com/hashicorp/consul/api v1.3.0/go.mod h1:MmDNSzIMUjNpY/mQ398R4bk2FnqQLoPndWW5VkKPlCE= -github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= github.com/hashicorp/consul/sdk v0.3.0/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= -github.com/hashicorp/go-immutable-radix v1.0.0 h1:AKDB1HM5PWEA7i4nhcpwOrO2byshxBjXVn/J/3+z5/0= +github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= +github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= +github.com/hashicorp/go-getter v1.7.1 
h1:SWiSWN/42qdpR0MdhaOc/bLR48PLuP1ZQtYLRlM69uY= +github.com/hashicorp/go-getter v1.7.1/go.mod h1:W7TalhMmbPmsSMdNjD0ZskARur/9GJ17cfHTRtXV744= github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-immutable-radix v1.3.1 h1:DKHmCUm2hRBK510BaiZlwvpD40f8bJFeZnpfm2KLowc= +github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= -github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= +github.com/hashicorp/go-safetemp v1.0.0 h1:2HR189eFNrjHQyENnQMMpCiBAsRxzbTMIgBhEyExpmo= +github.com/hashicorp/go-safetemp v1.0.0/go.mod h1:oaerMy3BhqiTbVye6QuFhFtIceqFoDHxNAB65b+Rj1I= github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.1 h1:fv1ep09latC32wFoVwnqcnKJGnMSdBanPczbHAYm1BE= github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek= +github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= github.com/hashicorp/golang-lru v0.5.0/go.mod 
h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= -github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/hashicorp/golang-lru v0.5.5-0.20210104140557-80c98217689d h1:dg1dEPuWpEqDnvIw251EVy4zlP8gWbsGj4BsUKCRpYs= +github.com/hashicorp/golang-lru v0.5.5-0.20210104140557-80c98217689d/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= -github.com/hdevalence/ed25519consensus v0.0.0-20210204194344-59a8610d2b87 h1:uUjLpLt6bVvZ72SQc/B4dXcPBw4Vgd7soowdRl52qEM= -github.com/hdevalence/ed25519consensus v0.0.0-20210204194344-59a8610d2b87/go.mod h1:XGsKKeXxeRr95aEOgipvluMPlgjr7dGlk9ZTWOjcUcg= -github.com/holiman/uint256 v1.1.1/go.mod h1:y4ga/t+u+Xwd7CpDgZESaRcWy0I7XMlTMA25ApIH5Jw= +github.com/hdevalence/ed25519consensus v0.1.0 h1:jtBwzzcHuTmFrQN6xQZn6CQEO/V9f7HsjsjeEZ6auqU= +github.com/hdevalence/ed25519consensus v0.1.0/go.mod h1:w3BHWjwJbFU29IRHL1Iqkw3sus+7FctEyM4RqDxYNzo= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/huandu/xstrings v1.0.0/go.mod h1:4qWG/gcEcfX4z/mBDHJ++3ReCw9ibxbsNJbcucJdbSo= -github.com/huandu/xstrings v1.2.0/go.mod h1:DvyZB1rfVYsBIigL8HwpZgxHwXozlTgGqn63UyNX5k4= +github.com/huandu/go-assert v1.1.5 h1:fjemmA7sSfYHJD7CUqs9qTwwfdNAx7/j2/ZlHXzNB3c= 
+github.com/huandu/go-assert v1.1.5/go.mod h1:yOLvuqZwmcHIC5rIzrBhT7D3Q9c3GFnd0JrPVhn/06U= +github.com/huandu/skiplist v1.2.0 h1:gox56QD77HzSC0w+Ws3MH3iie755GBJU1OER3h5VsYw= +github.com/huandu/skiplist v1.2.0/go.mod h1:7v3iFjLcSAzO4fN5B8dvebvo/qsfumiLiDXMrPiHF9w= github.com/hudl/fargo v1.3.0/go.mod h1:y3CKSmjA+wD2gak7sUSXTAoopbhU08POFhmITJgmKTg= -github.com/huin/goupnp v1.0.0/go.mod h1:n9v9KO1tAxYH82qOn+UTIFQDmx5n1Zxd/ClZDMX7Bnc= -github.com/huin/goutil v0.0.0-20170803182201-1ca381bf3150/go.mod h1:PpLOETDnJ0o3iZrZfqZzyLl6l7F3c6L1oWn7OICBi6o= -github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/imdario/mergo v0.3.4/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= -github.com/imdario/mergo v0.3.8/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= -github.com/improbable-eng/grpc-web v0.14.1 h1:NrN4PY71A6tAz2sKDvC5JCauENWp0ykG8Oq1H3cpFvw= -github.com/improbable-eng/grpc-web v0.14.1/go.mod h1:zEjGHa8DAlkoOXmswrNvhUGEYQA9UI7DhrGeHR1DMGU= -github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= +github.com/improbable-eng/grpc-web v0.15.0 h1:BN+7z6uNXZ1tQGcNAuaU1YjsLTApzkjt2tzCixLaUPQ= +github.com/improbable-eng/grpc-web v0.15.0/go.mod h1:1sy9HKV4Jt9aEs9JSnkWlRJPuPtwNr0l57L4f878wP8= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= -github.com/influxdata/influxdb v1.2.3-0.20180221223340-01288bdb0883/go.mod h1:qZna6X/4elxqT3yI9iZYdZrWWdeFOOprn86kgg4+IzY= +github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc= +github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/influxdata/influxdb1-client 
v0.0.0-20191209144304-8bf82d3c094d/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo= -github.com/jackpal/go-nat-pmp v1.0.2-0.20160603034137-1fa385a6f458/go.mod h1:QPH045xvCAeXUZOxsnwmrtiCoxIr9eob+4orBN1SBKc= -github.com/jedisct1/go-minisign v0.0.0-20190909160543-45766022959e/go.mod h1:G1CVv03EnqU1wYL2dFwXxW2An0az9JTl/ZsqXQeBlkU= -github.com/jessevdk/go-flags v0.0.0-20141203071132-1679536dcc89/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= -github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= -github.com/jgautheron/goconst v1.5.1/go.mod h1:aAosetZ5zaeC/2EfMeRswtxUFBpe2Hr7HzkgX4fanO4= -github.com/jhump/protoreflect v1.6.1/go.mod h1:RZQ/lnuN+zqeRVpQigTwO6o0AJUkxbnSnpuG7toUTG4= -github.com/jhump/protoreflect v1.9.0 h1:npqHz788dryJiR/l6K/RUQAyh2SwV91+d1dnh4RjO9w= -github.com/jhump/protoreflect v1.9.0/go.mod h1:7GcYQDdMU/O/BBrl/cX6PNHpXh6cenjd8pneu5yW7Tg= -github.com/jingyugao/rowserrcheck v1.1.0/go.mod h1:TOQpc2SLx6huPfoFGK3UOnEG+u02D3C1GeosjupAKCA= -github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af/go.mod h1:HEWGJkRDzjJY2sqdDwxccsGicWEf9BQOZsq2tV+xzM0= +github.com/jdxcode/netrc v0.0.0-20210204082910-926c7f70242a h1:d4+I1YEKVmWZrgkt6jpXBnLgV2ZjO0YxEtLDdfIZfH4= +github.com/jdxcode/netrc v0.0.0-20210204082910-926c7f70242a/go.mod h1:Zi/ZFkEqFHTm7qkjyNJjaWH4LQA9LQhGJyF0lTYGpxw= +github.com/jhump/protocompile v0.0.0-20220216033700-d705409f108f h1:BNuUg9k2EiJmlMwjoef3e8vZLHplbVw6DrjGFjLL+Yo= +github.com/jhump/protocompile v0.0.0-20220216033700-d705409f108f/go.mod h1:qr2b5kx4HbFS7/g4uYO5qv9ei8303JMsC7ESbYiqr2Q= +github.com/jhump/protoreflect v1.15.1 h1:HUMERORf3I3ZdX05WaQ6MIpd/NJ434hTp5YiKgfCL6c= +github.com/jhump/protoreflect v1.15.1/go.mod h1:jD/2GMKKE6OqX8qTjhADU1e6DShO+gavG9e0Q693nKo= github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= +github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= 
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= +github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= github.com/jmhodges/levigo v1.0.0 h1:q5EC36kV79HWeTBWsod3mG11EgStG3qArTKcvlksN1U= github.com/jmhodges/levigo v1.0.0/go.mod h1:Q6Qx+uH3RAqyK4rFQroq9RL7mdkABMcfhEI+nNuzMJQ= -github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks= github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= -github.com/jonboulle/clockwork v0.2.0/go.mod h1:Pkfl5aHPm1nk2H9h0bjmnJD/BcgbGXUBGnn1kMkgxc8= github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= -github.com/jrick/logrotate v1.0.0/go.mod h1:LNinyqDIJnpAur+b8yyulnQw/wDuN1+BYKlTRt3OuAQ= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.11 h1:uVUAXhF2To8cbw/3xN3pxj6kk7TYKs98NIrTqPlMWAQ= -github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= -github.com/jtolds/gls 
v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= -github.com/juju/ratelimit v1.0.1/go.mod h1:qapgC/Gy+xNh9UxzV13HGGl/6UXNN+ct+vwSgWNm/qk= -github.com/julienschmidt/httprouter v1.1.1-0.20170430222011-975b5c4c7c21/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= -github.com/julz/importas v0.0.0-20210419104244-841f0c0fe66d/go.mod h1:oSFU2R4XK/P7kNBrnL/FEQlDGN1/6WoxXEjSSXO0DV0= -github.com/jung-kurt/gofpdf v1.0.0/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes= -github.com/jung-kurt/gofpdf v1.0.3-0.20190309125859-24315acbbda5/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes= -github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k= -github.com/karalabe/usb v0.0.0-20190919080040-51dc0efba356/go.mod h1:Od972xHfMJowv7NGVDiWVxk2zxnWgjLlJzE+F4F7AGU= -github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= -github.com/keybase/go-keychain v0.0.0-20190712205309-48d3d31d256d h1:Z+RDyXzjKE0i2sTjZ/b1uxiGtPhFy34Ou/Tk0qwN0kM= -github.com/keybase/go-keychain v0.0.0-20190712205309-48d3d31d256d/go.mod h1:JJNrCn9otv/2QP4D7SMJBgaleKpOf66PnW6F5WGNRIc= +github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= +github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= -github.com/kisielk/errcheck v1.6.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/kkdai/bstream 
v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4= -github.com/kkdai/bstream v1.0.0/go.mod h1:FDnDOHt5Yx4p3FaHcioFT0QjDOtgUpvjeZqAs+NVZZA= -github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE= github.com/klauspost/compress v1.10.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= -github.com/klauspost/compress v1.10.7/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= -github.com/klauspost/compress v1.11.0/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.11.7/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= -github.com/klauspost/compress v1.15.9 h1:wKRjX6JRtDdrE9qwa4b/Cip7ACOshUI4smpCQanqjSY= -github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU= +github.com/klauspost/compress v1.12.3/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= +github.com/klauspost/compress v1.15.11/go.mod h1:QPwzmACJjUTFsnSHH934V6woptycfrDDJnH7hvFVbGM= +github.com/klauspost/compress v1.16.7 h1:2mk3MPGNzKyxErAw8YaohYh69+pa4sIQSC0fPGCFR9I= +github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= +github.com/klauspost/cpuid/v2 v2.0.9 h1:lgaqFMSdTdQYdZ04uHyN2d/eKdOMyi2YLSvlQIBFYa4= github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/pgzip v1.2.5 h1:qnWYvvKqedOF2ulHpMG72XQol4ILEJ8k2wwRl/Km8oE= +github.com/klauspost/pgzip v1.2.5/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt 
v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= @@ -1396,116 +745,75 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/kulti/thelper v0.4.0/go.mod h1:vMu2Cizjy/grP+jmsvOFDx1kYP6+PD1lqg4Yu5exl2U= -github.com/kunwardeep/paralleltest v1.0.2/go.mod h1:ZPqNm1fVHPllh5LPVujzbVz1JN2GhLxSfY+oqUsvG30= -github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= -github.com/kyoh86/exportloopref v0.1.8/go.mod h1:1tUcJeiioIs7VWe5gcOObrux3lb66+sBqGZrRkMwPgg= -github.com/ldez/gomoddirectives v0.2.2/go.mod h1:cpgBogWITnCfRq2qGoDkKMEVSaarhdBr6g8G04uz6d0= -github.com/ldez/tagliatelle v0.2.0/go.mod h1:8s6WJQwEYHbKZDsp/LjArytKOG8qaMrKQQ3mFukHs88= -github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= -github.com/letsencrypt/pkcs11key/v4 v4.0.0/go.mod h1:EFUvBDay26dErnNb70Nd0/VW3tJiIbETBPTl9ATXQag= -github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= -github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= -github.com/lib/pq v1.8.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= 
-github.com/lib/pq v1.9.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/lib/pq v1.10.2 h1:AqzbZs4ZoCBp+GtejcpCpcxM3zlSMx29dXbUSeVtJb8= -github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/libp2p/go-buffer-pool v0.0.2 h1:QNK2iAFa8gjAe1SPz6mHSMuCcjs+X1wlHzeOSqcmlfs= -github.com/libp2p/go-buffer-pool v0.0.2/go.mod h1:MvaB6xw5vOrDl8rYZGLFdKAuk/hRoRZd1Vi32+RXyFM= +github.com/lib/pq v1.10.7 h1:p7ZhMD+KsSRozJr34udlUrhboJwWAgCg34+/ZZNvZZw= +github.com/lib/pq v1.10.7/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/libp2p/go-buffer-pool v0.1.0 h1:oK4mSFcQz7cTQIfqbe4MIj9gLW+mnanjyFtc6cdF0Y8= +github.com/libp2p/go-buffer-pool v0.1.0/go.mod h1:N+vh8gMqimBzdKkSMVuydVDq+UV5QTWy5HSiZacSbPg= github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-bc2310a04743/go.mod h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM= github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4= -github.com/logrusorgru/aurora v0.0.0-20181002194514-a7b3b318ed4e/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= -github.com/lucasjones/reggen v0.0.0-20180717132126-cdb49ff09d77/go.mod h1:5ELEyG+X8f+meRWHuqUOewBOhvHkl7M76pdGEansxW4= -github.com/lyft/protoc-gen-star v0.6.0/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA= -github.com/lyft/protoc-gen-star v0.6.1/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA= -github.com/lyft/protoc-gen-star/v2 v2.0.1/go.mod h1:RcCdONR2ScXaYnQC5tUzxzlpA3WVYF7/opLeUgcQs/o= +github.com/linxGnu/grocksdb v1.7.16 h1:Q2co1xrpdkr5Hx3Fp+f+f7fRGhQFQhvi/+226dtLmA8= +github.com/linxGnu/grocksdb v1.7.16/go.mod h1:JkS7pl5qWpGpuVb3bPqTz8nC12X3YtPZT+Xq7+QfQo4= +github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= -github.com/magiconair/properties v1.8.1/go.mod 
h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= -github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls= -github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= -github.com/maratori/testpackage v1.0.1/go.mod h1:ddKdw+XG0Phzhx8BFDTKgpWP4i7MpApTE5fXSKAqwDU= -github.com/matoous/godox v0.0.0-20210227103229-6504466cf951/go.mod h1:1BELzlh859Sh1c6+90blK8lbYy0kwQf1bYlBhBysy1s= +github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo= +github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= +github.com/manifoldco/promptui v0.9.0 h1:3V4HzJk1TtXW1MTZMP7mdlwbBpIinw3HztaIlYthEiA= +github.com/manifoldco/promptui v0.9.0/go.mod h1:ka04sppxSGFAtxX0qhlYQjISsg9mR4GWtQEhdbn6Pgg= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= -github.com/mattn/go-colorable v0.1.0/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= -github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= -github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-ieproxy v0.0.0-20190610004146-91bb50d98149/go.mod h1:31jz6HNzdxOmlERGGEc4v/dMssOfmp2p5bT/okiKFFc= -github.com/mattn/go-ieproxy v0.0.0-20190702010315-6dee0af9227d/go.mod h1:31jz6HNzdxOmlERGGEc4v/dMssOfmp2p5bT/okiKFFc= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= -github.com/mattn/go-isatty v0.0.5-0.20180830101745-3fb116b82035/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= -github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= 
-github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= -github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= -github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-isatty v0.0.17 h1:BTarxUcIeDqL27Mc+vyvdWYSL28zpIhv3RoTdsLMPng= github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= -github.com/mattn/go-runewidth v0.0.3/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= -github.com/mattn/go-runewidth v0.0.6/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= -github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= -github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= -github.com/mattn/go-sqlite3 v1.14.14/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= -github.com/mattn/go-sqlite3 v1.14.15/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= -github.com/mattn/goveralls v0.0.2/go.mod h1:8d1ZMHsd7fW6IRPKQh46F2WRpyib5/X4FOpevwGNQEw= -github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= -github.com/mbilski/exhaustivestruct v1.2.0/go.mod 
h1:OeTBVxQWoEmB2J2JCHmXWPJ0aksxSUOUy+nvtVEfzXc= -github.com/mgechev/dots v0.0.0-20190921121421-c36f7dcfbb81/go.mod h1:KQ7+USdGKfpPjXk4Ga+5XxQM4Lm4e3gAogrreFAYpOg= -github.com/mgechev/revive v1.1.1/go.mod h1:PKqk4L74K6wVNwY2b6fr+9Qqr/3hIsHVfZCJdbvozrY= +github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= +github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= -github.com/miekg/dns v1.1.35/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM= -github.com/miekg/pkcs11 v1.0.2/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= -github.com/miekg/pkcs11 v1.0.3/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= -github.com/mimoo/StrobeGo v0.0.0-20181016162300-f8f6d4d2b643 h1:hLDRPB66XQT/8+wG9WsDpiCvZf1yKO7sz7scAjSlBa0= github.com/mimoo/StrobeGo v0.0.0-20181016162300-f8f6d4d2b643/go.mod h1:43+3pMjjKimDBf5Kr4ZFNGbLql1zKkbImw+fZbw3geM= -github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8/go.mod h1:mC1jAcsrzbxHt8iiaC+zU4b1ylILSosueou12R++wfY= -github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3/go.mod h1:RagcQ7I8IeTMnF8JTXieKnO4Z6JCsikNEzj0DwauVzE= -github.com/minio/highwayhash v1.0.1 h1:dZ6IIu8Z14VlC0VpfKofAhCy74wu/Qb5gcn52yWoz/0= -github.com/minio/highwayhash v1.0.1/go.mod h1:BQskDq+xkJ12lmlUUi7U0M5Swg3EWR+dLTk+kldvVxY= +github.com/mimoo/StrobeGo v0.0.0-20210601165009-122bf33a46e0 h1:QRUSJEgZn2Snx0EmT/QLXibWjSUDjKWvXIT19NBVp94= +github.com/mimoo/StrobeGo v0.0.0-20210601165009-122bf33a46e0/go.mod h1:43+3pMjjKimDBf5Kr4ZFNGbLql1zKkbImw+fZbw3geM= +github.com/minio/highwayhash v1.0.2 h1:Aak5U0nElisjDCfPSG79Tgzkn2gl66NxOMspRrKnA/g= +github.com/minio/highwayhash v1.0.2/go.mod h1:BQskDq+xkJ12lmlUUi7U0M5Swg3EWR+dLTk+kldvVxY= github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= -github.com/mitchellh/copystructure v1.0.0/go.mod 
h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/go-ps v1.0.0/go.mod h1:J4lOc8z8yJs6vUwklHw2XEIiT4z4C40KtWVN3nvg8Pg= github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= +github.com/mitchellh/go-testing-interface v1.14.1 h1:jrgshOhYAUVNMAJiKbEu7EqAwgJJ2JqpQmpLJOu07cU= +github.com/mitchellh/go-testing-interface v1.14.1/go.mod h1:gfgS7OtZj6MA4U1UrDRp04twqAjfvlZyCfX3sDjEym8= github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.3.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/mapstructure v1.4.1 h1:CpVNEelQCZBooIPDn+AR3NpivK/TIKU8bDxdASFVQag= -github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= -github.com/mitchellh/reflectwalk v1.0.1/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= -github.com/moby/sys/mountinfo v0.4.1/go.mod h1:rEr8tzG/lsIZHBtN/JjGG+LMYx9eXgW2JI+6q0qou+A= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/moby/buildkit v0.10.4 h1:FvC+buO8isGpUFZ1abdSLdGHZVqg9sqI4BbFL8tlzP4= +github.com/moby/buildkit v0.10.4/go.mod 
h1:Yajz9vt1Zw5q9Pp4pdb3TCSUXJBIroIQGQ3TTs/sLug= +github.com/moby/term v0.0.0-20220808134915-39b0c02b01ae h1:O4SWKdcHVCvYqyDV+9CJA1fcDN2L11Bule0iFy3YlAI= +github.com/moby/term v0.0.0-20220808134915-39b0c02b01ae/go.mod h1:E2VnQOmVuvZB6UYnnDB0qG5Nq/1tD9acaOpo6xmt0Kw= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= -github.com/moricho/tparallel v0.2.1/go.mod h1:fXEIZxG2vdfl0ZF8b42f5a78EhjjD5mX8qUplsoSU4k= -github.com/mozilla/scribe v0.0.0-20180711195314-fb71baf557c1/go.mod h1:FIczTrinKo8VaLxe6PWTPEXRXDIHz2QAwiaBaP5/4a8= -github.com/mozilla/tls-observatory v0.0.0-20210609171429-7bc42856d2e5/go.mod h1:FUqVoUPHSEdDR0MnFM3Dh8AU0pZHLXUD127SAJGER/s= -github.com/mrunalp/fileutils v0.5.0/go.mod h1:M1WthSahJixYnrXQl/DFQuteStB1weuxD2QJNHXfbSQ= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= github.com/mtibben/percent v0.2.1 h1:5gssi8Nqo8QU/r2pynCm+hBQHpkB/uNK7BJCFogWdzs= github.com/mtibben/percent v0.2.1/go.mod h1:KG9uO+SZkUp+VkRHsCdYQV3XSZrrSpR3O9ibNBTZrns= 
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f h1:KUppIJq7/+SVif2QVs3tOP0zanoHgBEVAwHxUSIzRqU= github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/mwitkow/go-proto-validators v0.0.0-20180403085117-0950a7990007/go.mod h1:m2XC9Qq0AlmmVksL6FktJCdTYyLk7V3fKyp0sl1yWQo= -github.com/mwitkow/go-proto-validators v0.2.0/go.mod h1:ZfA1hW+UH/2ZHOWvQ3HnQaU0DtnpXu850MZiy+YUgcc= github.com/mwitkow/grpc-proxy v0.0.0-20181017164139-0f1106ef9c76/go.mod h1:x5OoJHDHqxHS801UIuhqGl6QdSAEJvtausosHSdazIo= -github.com/nakabonne/nestif v0.3.0/go.mod h1:dI314BppzXjJ4HsCnbo7XzrJHPszZsjnk5wEBSYHI2c= -github.com/naoina/go-stringutil v0.1.0/go.mod h1:XJ2SJL9jCtBh+P9q5btrd/Ylo8XwT/h1USek5+NqSA0= -github.com/naoina/toml v0.1.2-0.20170918210437-9fafd6967416/go.mod h1:NBIhNtsFMo3G2szEBne+bO4gS192HuIYRqfvOWb4i1E= github.com/nats-io/jwt v0.3.0/go.mod h1:fRYCDE99xlTsqUzISS1Bi75UBJ6ljOJQOAAu5VglpSg= github.com/nats-io/jwt v0.3.2/go.mod h1:/euKqTS1ZD+zzjYrY7pseZrTtWQSjujC7xjPc8wL6eU= github.com/nats-io/nats-server/v2 v2.1.2/go.mod h1:Afk+wRZqkMQs/p45uXdrVLuab3gwv3Z8C4HTBu8GD/k= @@ -1513,48 +821,27 @@ github.com/nats-io/nats.go v1.9.1/go.mod h1:ZjDU1L/7fJ09jvUSRVBR2e7+RnLiiIQyqyzE github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= github.com/nats-io/nkeys v0.1.3/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= -github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354/go.mod h1:KSVJerMDfblTH7p5MZaTt+8zaT2iEk3AkVb9PQdZuE8= -github.com/neilotoole/errgroup v0.1.5/go.mod h1:Q2nLGf+594h0CLBs/Mbg6qOr7GtqDK7C2S41udRnToE= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= 
-github.com/nishanths/exhaustive v0.2.3/go.mod h1:bhIX678Nx8inLM9PbpvK1yv6oGtoP8BfaIeMzgBNKvc= -github.com/nishanths/predeclared v0.0.0-20190419143655-18a43bb90ffc/go.mod h1:62PewwiQTlm/7Rj+cxVYqZvDIUc+JjZq6GHAC1fsObQ= -github.com/nishanths/predeclared v0.0.0-20200524104333-86fad755b4d3/go.mod h1:nt3d53pc1VYcphSCIaYAJtnPYnr3Zyn8fMq2wvPGPso= -github.com/nishanths/predeclared v0.2.1/go.mod h1:HvkGJcA3naj4lOwnFXFDkFxVtSqQMB9sbB1usJ+xjQE= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= -github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= github.com/oklog/oklog v0.3.2/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs= github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= -github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= -github.com/olekukonko/tablewriter v0.0.1/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= -github.com/olekukonko/tablewriter v0.0.2-0.20190409134802-7e037d187b0c/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= -github.com/olekukonko/tablewriter v0.0.2/go.mod h1:rSAaSIOAGT9odnlyGlUfAJaoc5w2fSBUmeGDbRWPxyQ= -github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= github.com/onsi/ginkgo v1.14.0/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= -github.com/onsi/ginkgo v1.16.2/go.mod h1:CObGmKUOKaSC0RjmoAK7tKyn4Azo5P2IWuoMnvwxz1E= 
github.com/onsi/ginkgo v1.16.4 h1:29JGrr5oVBm5ulCWet69zQkzWipVXIol6ygQUe/EzNc= -github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= -github.com/onsi/gomega v1.4.1/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= -github.com/onsi/gomega v1.13.0 h1:7lLHu94wT9Ij0o6EWWclhu0aOh32VxhkwEJvzuWPeak= -github.com/onsi/gomega v1.13.0/go.mod h1:lRk9szgn8TxENtWd0Tp4c3wjlRfMTMH27I+3Je41yGY= +github.com/onsi/gomega v1.20.0 h1:8W0cWlwFkflGPLltQvLRB7ZVD5HuP6ng320w2IS245Q= github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk= -github.com/opencontainers/go-digest v1.0.0-rc1 h1:WzifXhOVOEOuFYOJAW6aQqW0TooG2iki3E3Ii+WN7gQ= -github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= -github.com/opencontainers/image-spec v1.0.1 h1:JMemWkRwHx4Zj+fVxWoMCFm/8sYGGrUVojFA6h/TRcI= -github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= -github.com/opencontainers/runc v0.1.1/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= -github.com/opencontainers/runc v1.0.2 h1:opHZMaswlyxz1OuGpBE53Dwe4/xF7EZTY0A2L/FpCOg= -github.com/opencontainers/runc v1.0.2/go.mod h1:aTaHFFwQXuA71CiyxOdFFIorAoemI04suvGRQFzWTD0= -github.com/opencontainers/runtime-spec v1.0.3-0.20210326190908-1c3f411f0417/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= -github.com/opencontainers/selinux v1.8.2/go.mod h1:MUIHuUEvKB1wtJjQdOyYRgOnLD2xAPP8dBsCoU0KuF8= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec 
v1.1.0-rc2 h1:2zx/Stx4Wc5pIPDvIxHXvXtQFW/7XWJGmnM7r3wg034= +github.com/opencontainers/image-spec v1.1.0-rc2/go.mod h1:3OVijpioIKYWTqjiG0zfF6wvoJ4fAXGbjdZuI2NgsRQ= +github.com/opencontainers/runc v1.1.3 h1:vIXrkId+0/J2Ymu2m7VjGvbSlAId9XNRPhn2p4b+d8w= github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492/go.mod h1:Ngi6UdF0k5OKD5t5wlmGhe/EDKPoUM3BXZSSfIuJbis= github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74= github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= @@ -1564,57 +851,46 @@ github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJ github.com/openzipkin/zipkin-go v0.2.1/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= github.com/openzipkin/zipkin-go v0.2.2/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= github.com/ory/dockertest v3.3.5+incompatible h1:iLLK6SQwIhcbrG783Dghaaa3WPzGc+4Emza6EbVUUGA= -github.com/ory/dockertest v3.3.5+incompatible/go.mod h1:1vX4m9wsvi00u5bseYwXaSnhNrne+V0E6LAcBILJdPs= -github.com/otiai10/copy v1.6.0 h1:IinKAryFFuPONZ7cm6T6E2QX/vcJwSnlaA5lfoaXIiQ= -github.com/otiai10/copy v1.6.0/go.mod h1:XWfuS3CrI0R6IE0FbgHsEazaXO8G0LpMp9o8tos0x4E= -github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE= -github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs= -github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo= -github.com/otiai10/mint v1.3.2/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc= github.com/pact-foundation/pact-go v1.0.4/go.mod h1:uExwJY4kCzNPcHRj+hCR/HBbOOIwwtUjcrb0b5/5kLM= github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pascaldekloe/goe v0.1.0 h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY= github.com/pascaldekloe/goe v0.1.0/go.mod 
h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= -github.com/pborman/uuid v0.0.0-20170112150404-1b00554d8222/go.mod h1:VyrYX9gd7irzKovcSS6BIIEwPRkP2Wm2m9ufcdFSJ34= github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= -github.com/pelletier/go-toml v1.9.3 h1:zeC5b1GviRUyKYd6OJPvBU/mcVDVoL1OhT17FCt5dSQ= -github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= +github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pelletier/go-toml/v2 v2.0.6/go.mod h1:eumQOmlWiOPt5WriQQqoM5y18pDHwha2N+QD+EUNTek= +github.com/pelletier/go-toml/v2 v2.0.7 h1:muncTPStnKRos5dpVKULv2FVd4bMOhNePj9CjgDb8Us= +github.com/pelletier/go-toml/v2 v2.0.7/go.mod h1:eumQOmlWiOPt5WriQQqoM5y18pDHwha2N+QD+EUNTek= github.com/performancecopilot/speed v3.0.0+incompatible/go.mod h1:/CLtqpZ5gBg1M9iaPbIdPPGyKcA8hKdoy6hAWba7Yac= -github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= -github.com/peterh/liner v1.1.1-0.20190123174540-a2c9a5303de7/go.mod h1:CRroGNssyjTd/qIG2FyxByd2S8JEAZXBl4qUrZf8GS0= -github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5 h1:q2e307iGHPdTGp0hoxKjt1H5pDo6utceo3dQVK3I5XQ= github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5/go.mod h1:jvVRKCrJTQWu0XVbaOlby/2lO20uSCHEMzzplHXte1o= -github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d/go.mod h1:3OzsM7FXDQlpCiw2j81fOmAwQLnZnLGXVKUzeKQXIAw= -github.com/phpdave11/gofpdf v1.4.2/go.mod h1:zpO6xFn9yxo3YLyMvW8HcKWVdbNqgIfOOp2dXMnm1mY= -github.com/phpdave11/gofpdi v1.0.12/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= -github.com/phpdave11/gofpdi v1.0.13/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= +github.com/petermattis/goid v0.0.0-20230317030725-371a4b8eda08 
h1:hDSdbBuw3Lefr6R18ax0tZ2BJeNB3NehB3trOwYBsdU= +github.com/petermattis/goid v0.0.0-20230317030725-371a4b8eda08/go.mod h1:pxMtw7cyUw6B2bRH0ZBANSPg+AoSud1I1iyJHI69jH4= github.com/pierrec/lz4 v1.0.2-0.20190131084431-473cd7ce01a1/go.mod h1:3/3N9NVKO0jef7pBehbT1qWhCMrIgbYNnFAZCqQ5LRc= github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= -github.com/pierrec/lz4/v4 v4.1.15/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4= +github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU= +github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA= -github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= +github.com/pkg/profile v1.6.0 h1:hUDfIISABYI59DyeB3OTay/HxSRwTQ8rB/H83k6r5dM= +github.com/pkg/profile v1.6.0/go.mod h1:qBsxPvzyUincmltOk6iyRVxHYg4adc0OFOv72ZdLa18= github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= -github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/polyfloyd/go-errorlint 
v0.0.0-20210722154253-910bb7978349/go.mod h1:wi9BfjxjF/bwiZ701TzmfKu6UKC357IOAtNr0Td0Lvw= github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs= -github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso= github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= github.com/prometheus/client_golang v1.3.0/go.mod h1:hJaj2vgQTGQmVCsAACORcieXFeDPbaTKGT+JTgUa3og= github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= -github.com/prometheus/client_golang v1.8.0/go.mod h1:O9VU6huf47PktckDQfMTX0Y8tY0/7TSWwj+ITvv0TnM= -github.com/prometheus/client_golang v1.11.0 h1:HNkLOAEQMIDv/K+04rukrLx6ch7msSRwf3/SASFAGtQ= -github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= +github.com/prometheus/client_golang v1.14.0 h1:nJdhIvne2eSX/XRAFV9PcvFFRbrjbcTUj0VP62TMhnw= +github.com/prometheus/client_golang v1.14.0/go.mod h1:8vpkKitgIVNcqrRBWh1C4TIUQgYNtG/XQE4E/Zae36Y= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= @@ -1623,121 +899,74 @@ github.com/prometheus/client_model v0.1.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6T github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model 
v0.3.0 h1:UBgGFHqYdG/TPFD1B1ogZywDqEkwp3fBMvqdiQ7Xew4= github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w= -github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= -github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/common v0.7.0/go.mod h1:DjGbpBbp5NYNiECxcL/VnbXCCaQpKd3tt26CguLLsqA= github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= -github.com/prometheus/common v0.14.0/go.mod h1:U+gB1OBLb1lF3O42bTCL+FK18tX9Oar16Clt/msog/s= github.com/prometheus/common v0.15.0/go.mod h1:U+gB1OBLb1lF3O42bTCL+FK18tX9Oar16Clt/msog/s= -github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= -github.com/prometheus/common v0.29.0 h1:3jqPBvKT4OHAbje2Ql7KeaaSicDBCxMYwEJU1zRJceE= -github.com/prometheus/common v0.29.0/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= +github.com/prometheus/common v0.42.0 h1:EKsfXEYo4JpWMHH5cg+KOUWeuJSov1Id8zGR8eeI1YM= +github.com/prometheus/common v0.42.0/go.mod h1:xBwqVerjNdUDjgODMpudtOMwlOwf2SaTr1yjz4b7Zbc= github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= -github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= 
github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= -github.com/prometheus/procfs v0.2.0/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= github.com/prometheus/procfs v0.3.0/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= -github.com/prometheus/procfs v0.6.0 h1:mxy4L2jP6qMonqmq+aTtOx1ifVWUgG/TAmntgbh3xv4= -github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= -github.com/prometheus/tsdb v0.6.2-0.20190402121629-4f204dcbc150/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= -github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= -github.com/pseudomuto/protoc-gen-doc v1.3.2/go.mod h1:y5+P6n3iGrbKG+9O04V5ld71in3v/bX88wUwgt+U8EA= -github.com/pseudomuto/protokit v0.2.0/go.mod h1:2PdH30hxVHsup8KpBTOXTBeMVhJZVio3Q8ViKSAXT0Q= -github.com/quasilyte/go-consistent v0.0.0-20190521200055-c6f3937de18c/go.mod h1:5STLWrekHfjyYwxBRVRXNOSewLJ3PWfDJd1VyTS21fI= -github.com/quasilyte/go-ruleguard v0.3.1-0.20210203134552-1b5a410e1cc8/go.mod h1:KsAh3x0e7Fkpgs+Q9pNLS5XpFSvYCEVl5gP9Pp1xp30= -github.com/quasilyte/go-ruleguard v0.3.4/go.mod h1:57FZgMnoo6jqxkYKmVj5Fc8vOt0rVzoE/UNAmFFIPqA= -github.com/quasilyte/go-ruleguard/dsl v0.3.0/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= -github.com/quasilyte/go-ruleguard/dsl v0.3.2/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= -github.com/quasilyte/go-ruleguard/rules v0.0.0-20201231183845-9e62ed36efe1/go.mod h1:7JTjp89EGyU1d6XfBiXihJNG37wB2VRkd125Q1u7Plc= -github.com/quasilyte/go-ruleguard/rules v0.0.0-20210203162857-b223e0831f88/go.mod h1:4cgAphtvu7Ftv7vOT2ZOYhC6CvBxZixcasr8qIOTA50= -github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95/go.mod h1:rlzQ04UMyJXu/aOvhd8qT+hvDrFpiwqp8MRXDY9szc0= +github.com/prometheus/procfs v0.9.0 h1:wzCHvIvM5SxWqYvwgVL7yJY8Lz3PKn49KQtpgMYJfhI= +github.com/prometheus/procfs v0.9.0/go.mod h1:+pB4zwohETzFnmlpe6yd2lSc+0/46IYZRB/chUwxUZY= github.com/rakyll/statik 
v0.1.7 h1:OF3QCZUuyPxuGEP7B4ypUa7sB/iHtqOTDYZXGM8KOdQ= github.com/rakyll/statik v0.1.7/go.mod h1:AlZONWzMtEnMs7W4e/1LURLiI49pIMmp6V9Unghqrcc= github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= -github.com/rcrowley/go-metrics v0.0.0-20200313005456-10cdbea86bc0 h1:MkV+77GLUNo5oJ0jf870itWm3D0Sjh7+Za9gazKc5LQ= -github.com/rcrowley/go-metrics v0.0.0-20200313005456-10cdbea86bc0/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= -github.com/regen-network/cosmos-proto v0.3.1 h1:rV7iM4SSFAagvy8RiyhiACbWEGotmqzywPxOvwMdxcg= -github.com/regen-network/cosmos-proto v0.3.1/go.mod h1:jO0sVX6a1B36nmE8C9xBFXpNwWejXC7QqCOnH3O0+YM= -github.com/regen-network/protobuf v1.3.3-alpha.regen.1 h1:OHEc+q5iIAXpqiqFKeLpu5NwTIkVXUs48vFMwzqpqY4= -github.com/regen-network/protobuf v1.3.3-alpha.regen.1/go.mod h1:2DjTFR1HhMQhiWC5sZ4OhQ3+NtdbZ6oBDKQwq5Ou+FI= -github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= -github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= -github.com/rjeczalik/notify v0.9.1/go.mod h1:rKwnCoCGeuQnwBtTSPL9Dad03Vh2n40ePRrjvIXnJho= +github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 h1:N/ElC8H3+5XpJzTSTfLsJV/mx9Q9g7kxmchpfZyxgzM= +github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/rogpeppe/go-internal v1.6.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= 
github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= -github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= -github.com/rs/cors v0.0.0-20160617231935-a62a804a8a00/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= -github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik= +github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M= +github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA= github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= -github.com/rs/xhandler v0.0.0-20160618193221-ed27b6fd6521/go.mod h1:RvLn4FgxWubrpZHtQLnOf6EwhN2hEMusxZOhcW9H3UQ= -github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= -github.com/rs/zerolog v1.23.0 h1:UskrK+saS9P9Y789yNNulYKdARjPZuS35B8gJF2x60g= -github.com/rs/zerolog v1.23.0/go.mod h1:6c7hFfxPOy7TacJc4Fcdi24/J0NKYGzjG8FWRI916Qo= +github.com/rs/cors v1.8.2 h1:KCooALfAYGs415Cwu5ABvv9n9509fSiG5SQJn/AQo4U= +github.com/rs/cors v1.8.2/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU= +github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= +github.com/rs/zerolog v1.31.0 h1:FcTR3NnLWW+NnTwwhFWiJSZr4ECLpqCm6QsEnyvbV4A= +github.com/rs/zerolog v1.31.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w= -github.com/ruudk/golang-pdf417 v0.0.0-20201230142125-a7e3863a1245/go.mod h1:pQAZKsJ8yyVxGRWYNEm9oFB8ieLgKFnamEyDmSA0BRk= -github.com/ryancurrah/gomodguard v1.2.3/go.mod 
h1:rYbA/4Tg5c54mV1sv4sQTP5WOPBcoLtnBZ7/TEhXAbg= -github.com/ryanrolds/sqlclosecheck v0.3.0/go.mod h1:1gREqxyTGR3lVtpngyFo3hZAgk0KCtEdgEkHwDbigdA= +github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E= -github.com/sanposhiho/wastedassign/v2 v2.0.6/go.mod h1:KyZ0MWTwxxBmfwn33zh3k1dmsbF2ud9pAAGfoLfjhtI= -github.com/sasha-s/go-deadlock v0.2.1-0.20190427202633-1595213edefa h1:0U2s5loxrTy6/VgfVoLuVLFJcURKLH49ie0zSch7gh4= -github.com/sasha-s/go-deadlock v0.2.1-0.20190427202633-1595213edefa/go.mod h1:F73l+cr82YSh10GxyRI6qZiCgK64VaZjwesgfQ1/iLM= +github.com/sasha-s/go-deadlock v0.3.1 h1:sqv7fDNShgjcaxkO0JNcOAlr8B9+cV5Ey/OB71efZx0= +github.com/sasha-s/go-deadlock v0.3.1/go.mod h1:F73l+cr82YSh10GxyRI6qZiCgK64VaZjwesgfQ1/iLM= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= -github.com/seccomp/libseccomp-golang v0.9.1/go.mod h1:GbW5+tmTXfcxTToHLXlScSlAvWlF4P2Ca7zGrPiEpWo= -github.com/securego/gosec/v2 v2.8.1/go.mod h1:pUmsq6+VyFEElJMUX+QB3p3LWNHXg1R3xh2ssVJPs8Q= -github.com/segmentio/fasthash v1.0.3/go.mod h1:waKX8l2N8yckOgmSsXJi7x1ZfdKZ4x7KRMzBtS3oedY= -github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= -github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c/go.mod h1:/PevMnwAxekIXwN8qQyfc5gl2NlkB3CQlkizAbOkeBs= -github.com/shirou/gopsutil v2.20.5+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA= -github.com/shirou/gopsutil/v3 v3.21.7/go.mod h1:RGl11Y7XMTQPmHh8F0ayC6haKNBgH4PXMJuTAcMOlz4= -github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod 
h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk= -github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOmsrJOB+vfqUK+7DmDyjhSLIIBnXo9lvZJj3MWQ= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= -github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= -github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM= +github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0= +github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= -github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= -github.com/snikch/goodman v0.0.0-20171125024755-10e37e294daa/go.mod h1:oJyF+mSPHbB5mVY2iO9KV3pTt/QbIkGaO8gQ2WrDbP4= github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= -github.com/sonatard/noctx v0.0.1/go.mod h1:9D2D/EoULe8Yy2joDHJj7bv3sZoq9AaSb8B4lqBjiZI= github.com/sony/gobreaker v0.4.1/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY= -github.com/sourcegraph/go-diff v0.6.1/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag071iBaWPF6cjs= github.com/spaolacci/murmur3 
v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI= github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= -github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= -github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= github.com/spf13/afero v1.9.2 h1:j49Hj62F0n+DaZ1dDCvhABaPNSGNkt32oRFxI33IEMw= github.com/spf13/afero v1.9.2/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y= github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng= -github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w= +github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU= github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= -github.com/spf13/cobra v1.1.1/go.mod h1:WnodtKOvamDL/PwE2M4iKs8aMDBZ5Q5klgD3qfVJQMI= -github.com/spf13/cobra v1.2.1 h1:+KmjbUw1hriSNMF55oPrkZcb27aECyrj8V2ytv7kWDw= -github.com/spf13/cobra v1.2.1/go.mod h1:ExllRjgxM/piMAM+3tAZvg8fsklGAf3tPfi+i8t68Nk= +github.com/spf13/cobra v1.6.1 h1:o94oiPyS4KD1mPy2fmcYYHHfCxLqYjJOhGsCHFZtEzA= +github.com/spf13/cobra v1.6.1/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY= github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= @@ -1746,25 +975,16 @@ 
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnIn github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= -github.com/spf13/viper v1.7.0/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg= -github.com/spf13/viper v1.7.1/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg= -github.com/spf13/viper v1.8.1 h1:Kq1fyeebqsBfbjZj4EL7gj2IO0mMaiyjYUWcUsl2O44= -github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns= -github.com/ssgreg/nlreturn/v2 v2.1.0/go.mod h1:E/iiPB78hV7Szg2YfRgyIrk1AD6JVMTRkkxBiELzh2I= -github.com/status-im/keycard-go v0.0.0-20190316090335-8537d3370df4/go.mod h1:RZLeN1LMWmRsyYjvAu+I6Dm9QmlDaIIt+Y+4Kd7Tp+Q= -github.com/steakknife/bloomfilter v0.0.0-20180922174646-6819c0d2a570/go.mod h1:8OR4w3TdeIHIh1g6EMY5p0gVNOovcWC+1vpc7naMuAw= -github.com/steakknife/hamming v0.0.0-20180906055917-c99c65617cd3/go.mod h1:hpGUWaI9xL8pRQCTXQgocU38Qw1g0Us7n5PxxTwTCYU= +github.com/spf13/viper v1.14.0 h1:Rg7d3Lo706X9tHsJMUjdiwMpHB7W8WnSVOssIY+JElU= +github.com/spf13/viper v1.14.0/go.mod h1:WT//axPky3FdvXHzGw33dNdXXXfFQqmEalje+egj8As= github.com/streadway/amqp v0.0.0-20190404075320-75d898a42a94/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= github.com/streadway/amqp v0.0.0-20190827072141-edfb9018d271/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= github.com/streadway/handy v0.0.0-20190108123426-d5acb3125c2a/go.mod h1:qNTQ5P5JnDBl6z3cMAg/SywNDC5ABu5ApDIw6lUbRmI= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= 
github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= -github.com/stretchr/testify v0.0.0-20170130113145-4d4bfba8f1d1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.1.4/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= @@ -1774,95 +994,46 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8= -github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= -github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= -github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= -github.com/syndtr/goleveldb v1.0.1-0.20200815110645-5c35d600f0ca h1:Ld/zXl5t4+D69SiV4JoN7kkfvJdOWlPpfxrzxpLMoUk= -github.com/syndtr/goleveldb v1.0.1-0.20200815110645-5c35d600f0ca/go.mod h1:u2MKkTVTVJWe5D1rCvame8WqhBd88EuIwODJZ1VHCPM= -github.com/tdakkota/asciicheck v0.0.0-20200416200610-e657995f937b/go.mod h1:yHp0ai0Z9gUljN3o0xMhYJnH/IcvkdTBOX2fmJ93JEM= -github.com/tecbot/gorocksdb v0.0.0-20191217155057-f0fad39f321c h1:g+WoO5jjkqGAzHWCjJB1zZfXPIAaDpzXIEJ0eS6B5Ok= -github.com/tecbot/gorocksdb 
v0.0.0-20191217155057-f0fad39f321c/go.mod h1:ahpPrc7HpcfEWDQRZEmnXMzHY03mLDYMCxeDzy46i+8= -github.com/tendermint/btcd v0.1.1 h1:0VcxPfflS2zZ3RiOAHkBiFUcPvbtRj5O7zHmcJWHV7s= -github.com/tendermint/btcd v0.1.1/go.mod h1:DC6/m53jtQzr/NFmMNEu0rxf18/ktVoVtMrnDD5pN+U= -github.com/tendermint/crypto v0.0.0-20191022145703-50d29ede1e15 h1:hqAk8riJvK4RMWx1aInLzndwxKalgi5rTqgfXxOxbEI= -github.com/tendermint/crypto v0.0.0-20191022145703-50d29ede1e15/go.mod h1:z4YtwM70uOnk8h0pjJYlj3zdYwi9l03By6iAIF5j/Pk= +github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/subosito/gotenv v1.4.1 h1:jyEFiXpy21Wm81FBN71l9VoMMV8H8jG+qIK3GCpY6Qs= +github.com/subosito/gotenv v1.4.1/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0= +github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 h1:epCh84lMvA70Z7CTTCmYQn2CKbY8j86K7/FAIr141uY= +github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7/go.mod h1:q4W45IWZaF22tdD+VEXcAWRA037jwmWEB5VWYORlTpc= github.com/tendermint/go-amino v0.16.0 h1:GyhmgQKvqF82e2oZeuMSp9JTN0N09emoSZlb2lyGa2E= github.com/tendermint/go-amino v0.16.0/go.mod h1:TQU0M1i/ImAo+tYpZi73AU3V/dKeCoMC9Sphe2ZwGME= -github.com/tendermint/spm v0.1.9 h1:O1DJF4evS8wgk5SZqRcO29irNNtKQmTpvQ0xFzUiczI= -github.com/tendermint/spm v0.1.9/go.mod h1:iHgfQ5YOI6ONc9E7ugGQolVdfSMHpeXfZ/OpXuN/42Q= -github.com/tendermint/tm-db v0.6.4 h1:3N2jlnYQkXNQclQwd/eKV/NzlqPlfK21cpRRIx80XXQ= -github.com/tendermint/tm-db v0.6.4/go.mod h1:dptYhIpJ2M5kUuenLr+Yyf3zQOv1SgBZcl8/BmWlMBw= -github.com/tetafro/godot v1.4.9/go.mod h1:LR3CJpxDVGlYOWn3ZZg1PgNZdTUvzsZWu8xaEohUpn8= -github.com/tidwall/gjson v1.6.7/go.mod h1:zeFuBCIqD4sN/gmqBzZ4j7Jd6UcA2Fc56x7QFsv+8fI= -github.com/tidwall/match v1.0.3/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= -github.com/tidwall/pretty v1.0.2/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= -github.com/tidwall/sjson v1.1.4/go.mod 
h1:wXpKXu8CtDjKAZ+3DrKY5ROCorDFahq8l0tey/Lx1fg= -github.com/timakin/bodyclose v0.0.0-20200424151742-cb6215831a94/go.mod h1:Qimiffbc6q9tBWlVV6x0P9sat/ao1xEkREYPPj9hphk= -github.com/tklauser/go-sysconf v0.3.7/go.mod h1:JZIdXh4RmBvZDBZ41ld2bGxRV3n4daiiqA3skYhAoQ4= -github.com/tklauser/numcpus v0.2.3/go.mod h1:vpEPS/JC+oZGGQ/My/vJnNsvMDQL6PwOqt8dsCw5j+E= +github.com/tidwall/btree v1.6.0 h1:LDZfKfQIBHGHWSwckhXI0RPSXzlo+KYdjK7FWSqOzzg= +github.com/tidwall/btree v1.6.0/go.mod h1:twD9XRA5jj9VUQGELzDO4HPQTNJsoWWfYEL+EUQ2cKY= github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= -github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= -github.com/tmc/grpc-websocket-proxy v0.0.0-20200427203606-3cfed13b9966/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= -github.com/tomarrell/wrapcheck/v2 v2.3.0/go.mod h1:aF5rnkdtqNWP/gC7vPUO5pKsB0Oac2FDTQP4F+dpZMU= -github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce/go.mod h1:o8v6yHRoik09Xen7gje4m9ERNah1d1PPsVq1VEx9vE4= -github.com/tommy-muehle/go-mnd/v2 v2.4.0/go.mod h1:WsUAkMJMYww6l/ufffCD3m+P7LEvr8TnZn9lwVDlgzw= github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= -github.com/tyler-smith/go-bip39 v1.0.1-0.20181017060643-dbb3b84ba2ef/go.mod h1:sJ5fKU0s6JVwZjjcUEX2zFOnvq0ASQ2K9Zr6cf67kNs= -github.com/tyler-smith/go-bip39 v1.0.2/go.mod h1:sJ5fKU0s6JVwZjjcUEX2zFOnvq0ASQ2K9Zr6cf67kNs= -github.com/ugorji/go v1.1.7 h1:/68gy2h+1mWMrwZFeD1kQialdSzAb432dtpeJ42ovdo= -github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= +github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= +github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod 
h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= -github.com/ugorji/go/codec v1.1.7 h1:2SvQaVZ1ouYrrKKwoSk2pzd4A9evlKJb9oTL+OaLUSs= -github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= -github.com/ultraware/funlen v0.0.3/go.mod h1:Dp4UiAus7Wdb9KUZsYWZEWiRzGuM2kXM1lPbfaF6xhA= -github.com/ultraware/whitespace v0.0.4/go.mod h1:aVMh/gQve5Maj9hQ/hg+F75lr/X5A89uZnzAmWSineA= +github.com/ugorji/go/codec v1.2.9 h1:rmenucSohSTiyL09Y+l2OCk+FrMxGMzho2+tjr5ticU= +github.com/ugorji/go/codec v1.2.9/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= +github.com/ulikunitz/xz v0.5.10/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= +github.com/ulikunitz/xz v0.5.11 h1:kpFauv27b6ynzBNT/Xy+1k+fK4WswhN/6PN5WhFAGw8= +github.com/ulikunitz/xz v0.5.11/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= -github.com/uudashr/gocognit v1.0.5/go.mod h1:wgYz0mitoKOTysqxTDMOUXg+Jb5SvtihkfmugIZYpEA= -github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= -github.com/valyala/fasthttp v1.16.0/go.mod h1:YOKImeEosDdBPnxc0gy7INqi3m1zK6A+xl6TwOBhHCA= -github.com/valyala/quicktemplate v1.6.3/go.mod h1:fwPzK2fHuYEODzJ9pkw0ipCPNHZ2tD5KW4lOuSdPKzY= -github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio= -github.com/viki-org/dnscache v0.0.0-20130720023526-c70c1f23c5d8/go.mod h1:dniwbG03GafCjFohMDmz6Zc6oCuiqgH6tGNyXTkHzXE= -github.com/vishvananda/netlink v1.1.0/go.mod h1:cTgwzPIzzgDAYoQrMm0EdrjRUBkTqKYppBueQtXaqoE= -github.com/vishvananda/netns v0.0.0-20191106174202-0a2b9b5464df/go.mod h1:JP3t17pCcGlemwknint6hfoeCVQrEMVwxRLRjXpq+BU= -github.com/vmihailenco/msgpack/v5 v5.1.4/go.mod h1:C5gboKD0TJPqWDTVTtrQNfRbiBwHZGo8UTqP/9/XvLI= -github.com/vmihailenco/tagparser v0.1.2/go.mod 
h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI= -github.com/wsddn/go-ecdh v0.0.0-20161211032359-48726bab9208/go.mod h1:IotVbo4F+mw0EzQ08zFqg7pK3FebNXpaMsRy2RT+Ees= github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= -github.com/xo/terminfo v0.0.0-20210125001918-ca9a967f8778/go.mod h1:2MuV+tbUrU1zIOPMxZ5EncGwgmMJsa+9ucAQZXxsObs= github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= -github.com/ybbus/jsonrpc v2.1.2+incompatible/go.mod h1:XJrh1eMSzdIYFbM08flv0wp5G35eRniyeGut1z+LSiE= -github.com/yeya24/promlinter v0.1.0/go.mod h1:rs5vtZzeBHqqMwXqFScncpCF6u06lezhZepno9AB1Oc= -github.com/yudai/gojsondiff v1.0.0/go.mod h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FBNExI05xg= -github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82/go.mod h1:lgjkn3NuSvDfVJdfcVVdX+jpBxNmX4rDAzaS45IcYoM= -github.com/yudai/pp v2.0.1+incompatible/go.mod h1:PuxR/8QJ7cyCkFp/aUDS+JY727OFEZkTdatxwunjIkc= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0= -github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA= -github.com/zigbee-alliance/tendermint v0.34.140 h1:QqY9+zNDZNtYGte62UnrBWuJNsu0zSizqA3FY4E2Jp4= -github.com/zigbee-alliance/tendermint v0.34.140/go.mod 
h1:FrwVm3TvsVicI9Z7FlucHV6Znfd5KBc/Lpp69cCwtk0= -github.com/zondax/hid v0.9.0 h1:eiT3P6vNxAEVxXMw66eZUAAnU2zD33JBkfG/EnfAKl8= -github.com/zondax/hid v0.9.0/go.mod h1:l5wttcP0jwtdLjqjMMWFVEE7d1zO0jvSPA9OPZxWpEM= -go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= +github.com/zigbee-alliance/cometbft v0.37.5 h1:vTE6bN8SndgGk3DDieT+xXI+M/Bprtfx8fawkUsNQiU= +github.com/zigbee-alliance/cometbft v0.37.5/go.mod h1:Cmg5Hp4sNpapm7j+x0xRyt2g0juQfmB752ous+pA0G8= +github.com/zondax/hid v0.9.2 h1:WCJFnEDMiqGF64nlZz28E9qLVZ0KSJ7xpc5DLEyma2U= +github.com/zondax/hid v0.9.2/go.mod h1:l5wttcP0jwtdLjqjMMWFVEE7d1zO0jvSPA9OPZxWpEM= +github.com/zondax/ledger-go v0.14.3 h1:wEpJt2CEcBJ428md/5MgSLsXLBos98sBOyxNmCjfUCw= +github.com/zondax/ledger-go v0.14.3/go.mod h1:IKKaoxupuB43g4NxeQmbLXv7T9AlQyie1UpHb342ycI= go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= -go.etcd.io/bbolt v1.3.4/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= -go.etcd.io/bbolt v1.3.5 h1:XAzx9gjCb0Rxj7EoqcClPD1d5ZBxZJk0jbuoPHenBt0= -go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= +go.etcd.io/bbolt v1.3.7 h1:j+zJOnnEjF/kyHlDDgGnVL/AIqIJPq8UoB2GSNfkUfQ= +go.etcd.io/bbolt v1.3.7/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw= go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg= -go.etcd.io/etcd v0.0.0-20200513171258-e048e166ab9c/go.mod h1:xCI7ZzBfRuGgBXyXO6yfWfDmlWd35khcWpUa4L0xI/k= -go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= -go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= -go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= -go.mozilla.org/mozlog v0.0.0-20170222151521-4bb13139d403/go.mod h1:jHoPAGnDrCy6kaI2tAze5Prf0Nr0w/oNkROt2lw3n3o= go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= go.opencensus.io v0.20.2/go.mod 
h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= @@ -1872,65 +1043,57 @@ go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= +go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.36.3 h1:syAz40OyelLZo42+3U68Phisvrx4qh+4wpdZw7eUUdY= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.36.3/go.mod h1:Dts42MGkzZne2yCru741+bFiTMWkIj/LLRizad7b9tw= +go.opentelemetry.io/otel v1.19.0 h1:MuS/TNf4/j4IXsZuJegVzI1cwut7Qc00344rgH7p8bs= +go.opentelemetry.io/otel v1.19.0/go.mod h1:i0QyjOq3UPoTzff0PJB2N66fb4S0+rSbSB15/oyH9fY= +go.opentelemetry.io/otel/metric v1.19.0 h1:aTzpGtV0ar9wlV4Sna9sdJyII5jTVJEvKETPiOKwvpE= +go.opentelemetry.io/otel/metric v1.19.0/go.mod h1:L5rUsV9kM1IxCj1MmSdS+JQAcVm319EUrDVLrt7jqt8= +go.opentelemetry.io/otel/sdk v1.19.0 h1:6USY6zH+L8uMH8L3t1enZPR3WFEmSTADlqldyHtJi3o= +go.opentelemetry.io/otel/trace v1.19.0 h1:DFVQmlVbfVeOuBRrwdtaehRrWiL1JoVs9CPIQ1Dzxpg= +go.opentelemetry.io/otel/trace v1.19.0/go.mod h1:mfaSyvGyEJEI0nyV2I4qhNQnbBOUUmYZpYojqMnX2vo= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= -go.opentelemetry.io/proto/otlp v0.15.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= -go.opentelemetry.io/proto/otlp v0.19.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic 
v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/atomic v1.10.0 h1:9qC72Qh0+3MqyJbAn8YU5xVq1frD8bn3JtD2oXtafVQ= +go.uber.org/atomic v1.10.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= +go.uber.org/goleak v1.1.11 h1:wy28qYRKZgnJTxGxvye5/wgWr1EKjmUDGYox5mGlRlI= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= -go.uber.org/multierr v1.4.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= -go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= +go.uber.org/multierr v1.8.0 h1:dg6GjLku4EH+249NNmoIciG9N/jURbDG+pFlTkhzIC8= +go.uber.org/multierr v1.8.0/go.mod h1:7EAYxJLBy9rStEaz58O2t4Uvip6FSURkq8/ppBp95ak= go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= -go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= -golang.org/x/crypto v0.0.0-20170930174604-9419663f5a44/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20180501155221-613d6eafa307/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +go.uber.org/zap v1.23.0 h1:OjGQ5KQDEUawVHxNwQgPpiypGHOxo2mNZsOqTak4fFY= +go.uber.org/zap v1.23.0/go.mod h1:D+nX8jyLsMHMYrln8A0rJjFt/T/9/bGgIhAqxv5URuY= +golang.org/x/arch v0.0.0-20210923205945-b76863e36670 h1:18EFjUmQOcUvxNYSkA6jO9VAiXCnxFY6NyDX0bHDmkU= +golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod 
h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190909091759-094676da4a83/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191206172530-e9b2fee46413/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200115085410-6d4e4cb37c7d/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200709230013-948cd5f35899/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20201117144127-c1f2f97bffc9/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= -golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad/go.mod 
h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= -golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8= -golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20220314234659-1baeb1ce4c0b/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw= -golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= -golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0= -golang.org/x/crypto v0.11.0 h1:6Ewdq3tDic1mg5xRO4milcWCfMVQhI4NkqWWvqejpuA= -golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio= -golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20180807140117-3d87b88a115f/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/crypto v0.5.0/go.mod h1:NK/OQwhpMQP3MwtdjgLlYHnH9ebylxKWv3e0fK+mkQU= +golang.org/x/crypto v0.16.0 h1:mMMrFzRSCF0GvB7Ne27XVtVAaXLrPmgPC7/v0tkwHaY= +golang.org/x/crypto v0.16.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190125153040-c74c464bbbf2/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod 
h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= -golang.org/x/exp v0.0.0-20190731235908-ec7cb31e5a56/go.mod h1:JhuoJpWY28nO4Vef9tZUw9qufEGTyX1+7lmHxV5q5G4= golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= -golang.org/x/exp v0.0.0-20191002040644-a1355ae1e2c3/go.mod h1:NOZ3BPKG0ec/BKJQgnvsSFpcKLM5xXVWnvZS97DWHgE= golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= @@ -1938,20 +1101,11 @@ golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= golang.org/x/exp v0.0.0-20200331195152-e8c3332aa8e5/go.mod h1:4M0jN8W1tt0AVLNr8HDosyJCDCDuyL9N9+3m7wDWgKw= -golang.org/x/exp v0.0.0-20220827204233-334a2380cb91/go.mod h1:cyybsKvd6eL0RnXn6p/Grxp8F5bW7iYuBgsNCOHpMYE= -golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs= +golang.org/x/exp v0.0.0-20230711153332-06a737ee72cb h1:xIApU0ow1zwMa2uL1VDNeQlNVFTWMQxZUZCMDy0Q4Us= +golang.org/x/exp v0.0.0-20230711153332-06a737ee72cb/go.mod h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/image 
v0.0.0-20200119044424-58c23975cae1/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/image v0.0.0-20200430140353-33d19683fad8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/image v0.0.0-20200618115811-c13761719519/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/image v0.0.0-20201208152932-35266b937fa6/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/image v0.0.0-20210216034530-4410531fe030/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/image v0.0.0-20210607152325-775e3b0c77b9/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= -golang.org/x/image v0.0.0-20210628002857-a66eb6448b8d/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= -golang.org/x/image v0.0.0-20211028202545-6944b10bf410/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= -golang.org/x/image v0.0.0-20220302094943-723b81ca9867/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= @@ -1965,28 +1119,21 @@ golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPI golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= -golang.org/x/mobile v0.0.0-20200801112145-973feb4309de/go.mod h1:skQtrUTUwhdJvXM/2KKJzY8pDgNr9I/FOMqDVRPBUS4= golang.org/x/mod 
v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.1.1-0.20191209134235-331c550502dd/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= -golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.10.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/net v0.0.0-20180719180050-a680a1efc54d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/mod v0.11.0 h1:bUO06HqtnRcc/7l71XBe4WcqTZ+3AH1J59zWDDwLKgU= +golang.org/x/mod v0.11.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net 
v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181011144130-49bb7cea24b1/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -2004,8 +1151,6 @@ golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20191002035440-2ec189313ef0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -2019,12 +1164,10 @@ golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/ golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200602114024-627f9648deb9/go.mod 
h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200813134508-3edf25e44fcc/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200904194848-62affa334b73/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= @@ -2034,13 +1177,8 @@ golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210903162142-ad29c8ab022f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net 
v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= @@ -2051,18 +1189,12 @@ golang.org/x/net v0.0.0-20220617184016-355a448f1bc9/go.mod h1:XRhObCWvk6IyKnWLug golang.org/x/net v0.0.0-20220624214902-1bab6f366d9e/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.0.0-20220909164309-bea034e7d591/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= -golang.org/x/net v0.0.0-20221012135044-0b7e1fb9d458/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= golang.org/x/net v0.0.0-20221014081412-f15817d10f9b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk= golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= -golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= -golang.org/x/net v0.4.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE= -golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= -golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= -golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= -golang.org/x/net v0.12.0 h1:cfawfvKITfUsFCeJIHJrbSxpeu/E81khclypR0GVT50= -golang.org/x/net v0.12.0/go.mod 
h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA= +golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c= +golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -2074,7 +1206,6 @@ golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= @@ -2087,17 +1218,14 @@ golang.org/x/oauth2 v0.0.0-20220608161450-d0670ef3b1eb/go.mod h1:jaDAt6Dkxork7Lm golang.org/x/oauth2 v0.0.0-20220622183110-fd043fe589d2/go.mod h1:jaDAt6Dkxork7LmZnYtzbRWj0W47D86a3TGe0YHBvmE= golang.org/x/oauth2 v0.0.0-20220822191816-0ebed06d0094/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= golang.org/x/oauth2 v0.0.0-20220909003341-f21342109be1/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= -golang.org/x/oauth2 v0.0.0-20221006150949-b44042a4b9c1/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= golang.org/x/oauth2 
v0.0.0-20221014153046-6fdb5e3db783/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= -golang.org/x/oauth2 v0.5.0/go.mod h1:9/XBHVqLaWO3/BRHs5jbpYCnOZVjj5V0ndyaAM7KB4I= -golang.org/x/oauth2 v0.6.0/go.mod h1:ycmewcwgD4Rpr3eZJLSB4Kyyljb3qDh40vJ8STE5HKw= -golang.org/x/oauth2 v0.7.0/go.mod h1:hPLQkd9LyjfXTiRohC/41GhcFqxisoUQ99sCUOHO9x4= -golang.org/x/oauth2 v0.8.0/go.mod h1:yr7u4HXZRm1R1kBWqr/xKNqewf0plRYoB7sla+BCIXE= +golang.org/x/oauth2 v0.1.0/go.mod h1:G9FE4dLTsbXUu90h/Pf85g4w1D+SSAgR+q46nJZ8M4A= +golang.org/x/oauth2 v0.15.0 h1:s8pnnxNVzjWyrvYdFUQq5llS1PX2zhPXmccZv99h7uQ= +golang.org/x/oauth2 v0.15.0/go.mod h1:q48ptWNTY5XWf+JNten23lcvHpLJ0ZSxF5ttTHKVCAM= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190412183630-56d357773e84/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -2107,11 +1235,11 @@ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync 
v0.0.0-20220819030929-7fc1605a5dde/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220929204114-8fcdb60fdcc0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.2.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ= +golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -2121,34 +1249,26 @@ golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5h golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190130150945-aca44879d564/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190606203320-7fc4e5ec1444/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190712062909-fae7ac547cb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191115151921-52ab43148777/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20191220142924-d4481acd189f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -2162,53 +1282,44 @@ golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200602225109-6fdc65e7d980/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200814200057-3d37ad5750ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200824131525-c12d262b63d8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200909081042-eff7692f9009/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201015000850-e3ed0017c211/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210304124612-50617c2ba197/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210426230700-d19ff857e887/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210816183151-1e6c022a8912/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210819135213-f52c844e1c1c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210903071746-97244b99971b/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220315194320-039c03cc5b86/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -2217,31 +1328,26 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220610221304-9f5ed59c137d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220615213510-4f61da869c0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220624220833-87e55d714810/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys 
v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220829200755-d48e67d00261/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20221010170243-090e33056c14/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA= -golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU= +golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= -golang.org/x/term v0.3.0/go.mod 
h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA= +golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= -golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= -golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY= -golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= -golang.org/x/term v0.10.0 h1:3R7pNqamzBraeqj/Tj8qt1aQ2HpmlC+Cx/qL/7hn4/c= -golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o= +golang.org/x/term v0.15.0 h1:y/Oo/a/q3IXu26lQgl04j/gjuBDOBlx7X6Om1j2CPW4= +golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -2253,62 +1359,46 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4= -golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.14.0 
h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20220922220347-f3bd1da661af/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.1.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4= +golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190110163146-51295c7ec13a/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190206041539-40960b6deb8e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod 
h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= -golang.org/x/tools v0.0.0-20190307163923-6a08e3108db3/go.mod h1:25r3+/G6/xytQM8iWZKq3Hn0kr0rgFKPUNVEL/dr3z4= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190311215038-5c2858a9cfe5/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190321232350-e250d351ecad/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190322203728-c1a832b0ad89/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190624222133-a101b041ded4/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190910044552-dd2b5c81c578/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod 
h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190916130336-e45ffcd953cc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190927191325-030b2cf1153e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191010075000-0337d82405ff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191216052735-49a3e744a425/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200117012304-6edc0a871e69/go.mod 
h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200117220505-0cba7a3a9ee9/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= @@ -2318,47 +1408,19 @@ golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapK golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200324003944-a576cf524670/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200329025819-fd4102a86c65/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200414032229-332987a829c3/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200422022333-3d57cf2e726e/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200426102838-f3a5411a4c3b/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod 
h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200522201501-cb1345f3a375/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200622203043-20e05c1c8ffa/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200624225443-88f3c62a19ff/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200625211823-6506e20df31f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200626171337-aa94e735be7f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200630154851-b2d8b0336632/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200706234117-b22de6825cf7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200717024301-6ddee64345a6/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200724022722-7017fd6b1305/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200812195022-5ae4c3c160a0/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200820010801-b793a1359eac/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200831203904-5a2aa26beb65/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod 
h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= -golang.org/x/tools v0.0.0-20201001104356-43ebab892c4c/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= -golang.org/x/tools v0.0.0-20201002184944-ecd9fd270d5d/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= -golang.org/x/tools v0.0.0-20201023174141-c8cfbd0f21e6/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201028025901-8cd080b735b3/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201114224030-61ea331ec02b/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201118003311-bd56c0adb394/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201124115921-2c860bdd6e78/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201230224404-63754364767c/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210101214203-2dba1e4ea05c/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210104081019-d8d6ddbec6ee/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= @@ -2368,12 +1430,9 @@ golang.org/x/tools v0.1.2/go.mod 
h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.3.0/go.mod h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k= +golang.org/x/tools v0.6.0 h1:BOw41kyTf3PuCW1pVQf8+Cyg8pMlkYB1oo9iJ6D/lKM= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= -golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s= -golang.org/x/tools v0.9.1/go.mod h1:owI94Op576fPu3cIGQeHs3joujW/2Oc6MtlxbF5dfNc= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -2381,21 +1440,13 @@ golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= +golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk= golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= -gonum.org/v1/gonum v0.0.0-20180816165407-929014505bf4/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo= -gonum.org/v1/gonum v0.8.2/go.mod 
h1:oe/vMfY3deqTw+1EZJhuvEW2iwGF1bW9wwu7XCu0+v0= -gonum.org/v1/gonum v0.9.3/go.mod h1:TZumC3NeyVQskjXqmyWt4S3bINhy7B4eYwW69EbyX+0= -gonum.org/v1/gonum v0.11.0/go.mod h1:fSG4YDCxxUZQJ7rKsQrj0gMOg00Il0Z96/qMA4bVQhA= -gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw= -gonum.org/v1/plot v0.0.0-20190515093506-e2840ee46a6b/go.mod h1:Wt8AAjI+ypCyYX3nZBvf6cAIx93T+c/OS2HFAYskSZc= -gonum.org/v1/plot v0.9.0/go.mod h1:3Pcqqmp6RHvJI72kgb8fThyUnav364FOsdDo2aGW5lY= -gonum.org/v1/plot v0.10.1/go.mod h1:VZW5OlhkL1mysU9vaqNHnsy86inf6Ot+jB3r+BczCEo= google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.10.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= @@ -2413,7 +1464,6 @@ google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34q google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= -google.golang.org/api v0.44.0/go.mod h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8= google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= google.golang.org/api 
v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= @@ -2440,33 +1490,21 @@ google.golang.org/api v0.95.0/go.mod h1:eADj+UBuxkh5zlrSntJghuNeg8HwQ1w5lTKkuqaE google.golang.org/api v0.96.0/go.mod h1:w7wJQLTM+wvQpNf5JyEcBoxK0RH7EDrh/L4qfsuJ13s= google.golang.org/api v0.97.0/go.mod h1:w7wJQLTM+wvQpNf5JyEcBoxK0RH7EDrh/L4qfsuJ13s= google.golang.org/api v0.98.0/go.mod h1:w7wJQLTM+wvQpNf5JyEcBoxK0RH7EDrh/L4qfsuJ13s= -google.golang.org/api v0.99.0/go.mod h1:1YOf74vkVndF7pG6hIHuINsM7eWwpVTAfNMNiL91A08= google.golang.org/api v0.100.0/go.mod h1:ZE3Z2+ZOr87Rx7dqFsdRQkRBk36kDtp/h+QpHbB7a70= -google.golang.org/api v0.102.0/go.mod h1:3VFl6/fzoA+qNuS1N1/VfXY4LjoXN/wzeIp7TweWwGo= -google.golang.org/api v0.103.0/go.mod h1:hGtW6nK1AC+d9si/UBhw8Xli+QMOf6xyNAyJw4qU9w0= -google.golang.org/api v0.106.0/go.mod h1:2Ts0XTHNVWxypznxWOYUeI4g3WdP9Pk2Qk58+a/O9MY= -google.golang.org/api v0.107.0/go.mod h1:2Ts0XTHNVWxypznxWOYUeI4g3WdP9Pk2Qk58+a/O9MY= -google.golang.org/api v0.108.0/go.mod h1:2Ts0XTHNVWxypznxWOYUeI4g3WdP9Pk2Qk58+a/O9MY= -google.golang.org/api v0.110.0/go.mod h1:7FC4Vvx1Mooxh8C5HWjzZHcavuS2f6pmJpZx60ca7iI= -google.golang.org/api v0.111.0/go.mod h1:qtFHvU9mhgTJegR31csQ+rwxyUTHOKFqCKWp1J0fdw0= -google.golang.org/api v0.114.0/go.mod h1:ifYI2ZsFK6/uGddGfAD5BMxlnkBqCmqHSDUVi45N5Yg= -google.golang.org/api v0.118.0/go.mod h1:76TtD3vkgmZ66zZzp72bUUklpmQmKlhh6sYtIjYK+5E= -google.golang.org/api v0.122.0/go.mod h1:gcitW0lvnyWjSp9nKxAbdHKIZ6vF4aajGueeslZOyms= -google.golang.org/api v0.124.0/go.mod h1:xu2HQurE5gi/3t1aFCvhPD781p0a3p11sdunTJ2BlP4= -google.golang.org/api v0.125.0/go.mod h1:mBwVAtz+87bEN6CbA1GtZPDOqY2R5ONPqJeIlvyo4Aw= -google.golang.org/api v0.126.0/go.mod h1:mBwVAtz+87bEN6CbA1GtZPDOqY2R5ONPqJeIlvyo4Aw= +google.golang.org/api v0.149.0 h1:b2CqT6kG+zqJIVKRQ3ELJVLN1PwHZ6DJ3dW8yl82rgY= +google.golang.org/api v0.149.0/go.mod h1:Mwn1B7JTXrzXtnvmzQE2BD6bYZQ8DShKZDZbeN9I7qI= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= 
google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= -google.golang.org/appengine v1.6.2/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/genproto v0.0.0-20170818010345-ee236bd376b0/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= +google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20180831171423-11092d34479b/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20181107211654-5fc9ac540362/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= @@ -2475,7 +1513,6 @@ google.golang.org/genproto v0.0.0-20190530194941-fb225487d101/go.mod h1:z3L6/3dT google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto 
v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= -google.golang.org/genproto v0.0.0-20190927181202-20e1ac93f88c/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= @@ -2488,7 +1525,6 @@ google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfG google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200324203455-a04cca1dde73/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= @@ -2497,14 +1533,11 @@ google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfG google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto 
v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= -google.golang.org/genproto v0.0.0-20200626011028-ee7919e894b5/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200707001353-8e8330bf89df/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201119123407-9b1e624d6bc4/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= @@ -2543,8 +1576,8 @@ google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c/go.mod h1:kGP+zUP2 google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220314164441-57ef72a4c106/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= google.golang.org/genproto 
v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= -google.golang.org/genproto v0.0.0-20220329172620-7be39ac1afc7/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220414192740-2d67ff6cf2b4/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= @@ -2577,59 +1610,57 @@ google.golang.org/genproto v0.0.0-20220926220553-6981cbe3cfce/go.mod h1:woMGP53B google.golang.org/genproto v0.0.0-20221010155953-15ba04fc1c0e/go.mod h1:3526vdqwhZAwq4wsRUaVG555sVgsNmIjRtO7t/JH29U= google.golang.org/genproto v0.0.0-20221014173430-6e2ab493f96b/go.mod h1:1vXfmgAz9N9Jx0QA82PqRVauvCz1SGSz739p0f183jM= google.golang.org/genproto v0.0.0-20221014213838-99cd37c6964a/go.mod h1:1vXfmgAz9N9Jx0QA82PqRVauvCz1SGSz739p0f183jM= -google.golang.org/genproto v0.0.0-20221024153911-1573dae28c9c/go.mod h1:9qHF0xnpdSfF6knlcsnpzUu5y+rpwgbvsyGAZPBMg4s= -google.golang.org/genproto v0.0.0-20221024183307-1bc688fe9f3e/go.mod h1:9qHF0xnpdSfF6knlcsnpzUu5y+rpwgbvsyGAZPBMg4s= -google.golang.org/genproto v0.0.0-20221027153422-115e99e71e1c/go.mod h1:CGI5F/G+E5bKwmfYo09AXuVN4dD894kIKUFmVbP2/Fo= -google.golang.org/genproto v0.0.0-20221109142239-94d6d90a7d66/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= -google.golang.org/genproto v0.0.0-20221114212237-e4508ebdbee1/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= -google.golang.org/genproto v0.0.0-20221117204609-8f9c96812029/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= -google.golang.org/genproto v0.0.0-20221118155620-16455021b5e6/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= -google.golang.org/genproto v0.0.0-20221201164419-0e50fba7f41c/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= -google.golang.org/genproto 
v0.0.0-20221201204527-e3fa12d562f3/go.mod h1:rZS5c/ZVYMaOGBfO68GWtjOw/eLaZM1X6iVtgjZ+EWg= -google.golang.org/genproto v0.0.0-20221202195650-67e5cbc046fd/go.mod h1:cTsE614GARnxrLsqKREzmNYJACSWWpAWdNMwnD7c2BE= -google.golang.org/genproto v0.0.0-20221227171554-f9683d7f8bef/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= -google.golang.org/genproto v0.0.0-20230110181048-76db0878b65f/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= -google.golang.org/genproto v0.0.0-20230112194545-e10362b5ecf9/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= -google.golang.org/genproto v0.0.0-20230113154510-dbe35b8444a5/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= -google.golang.org/genproto v0.0.0-20230123190316-2c411cf9d197/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= -google.golang.org/genproto v0.0.0-20230124163310-31e0e69b6fc2/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= -google.golang.org/genproto v0.0.0-20230125152338-dcaf20b6aeaa/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= -google.golang.org/genproto v0.0.0-20230127162408-596548ed4efa/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= -google.golang.org/genproto v0.0.0-20230209215440-0dfe4f8abfcc/go.mod h1:RGgjbofJ8xD9Sq1VVhDM1Vok1vRONV+rg+CjzG4SZKM= -google.golang.org/genproto v0.0.0-20230216225411-c8e22ba71e44/go.mod h1:8B0gmkoRebU8ukX6HP+4wrVQUY1+6PkQ44BSyIlflHA= -google.golang.org/genproto v0.0.0-20230222225845-10f96fb3dbec/go.mod h1:3Dl5ZL0q0isWJt+FVcfpQyirqemEuLAK/iFvg1UP1Hw= -google.golang.org/genproto v0.0.0-20230223222841-637eb2293923/go.mod h1:3Dl5ZL0q0isWJt+FVcfpQyirqemEuLAK/iFvg1UP1Hw= -google.golang.org/genproto v0.0.0-20230303212802-e74f57abe488/go.mod h1:TvhZT5f700eVlTNwND1xoEZQeWTB2RY/65kplwl/bFA= -google.golang.org/genproto v0.0.0-20230306155012-7f2fa6fef1f4/go.mod h1:NWraEVixdDnqcqQ30jipen1STv2r/n24Wb7twVTGR4s= -google.golang.org/genproto v0.0.0-20230320184635-7606e756e683/go.mod h1:NWraEVixdDnqcqQ30jipen1STv2r/n24Wb7twVTGR4s= 
-google.golang.org/genproto v0.0.0-20230323212658-478b75c54725/go.mod h1:UUQDJDOlWu4KYeJZffbWgBkS1YFobzKbLVfK69pe0Ak= -google.golang.org/genproto v0.0.0-20230330154414-c0448cd141ea/go.mod h1:UUQDJDOlWu4KYeJZffbWgBkS1YFobzKbLVfK69pe0Ak= -google.golang.org/genproto v0.0.0-20230331144136-dcfb400f0633/go.mod h1:UUQDJDOlWu4KYeJZffbWgBkS1YFobzKbLVfK69pe0Ak= -google.golang.org/genproto v0.0.0-20230403163135-c38d8f061ccd/go.mod h1:UUQDJDOlWu4KYeJZffbWgBkS1YFobzKbLVfK69pe0Ak= -google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU= -google.golang.org/genproto v0.0.0-20230525234025-438c736192d0/go.mod h1:9ExIQyXL5hZrHzQceCwuSYwZZ5QZBazOcprJ5rgs3lY= -google.golang.org/genproto v0.0.0-20230530153820-e85fd2cbaebc/go.mod h1:xZnkP7mREFX5MORlOPEzLMr+90PPZQ2QWzrVTWfAq64= -google.golang.org/genproto v0.0.0-20230629202037-9506855d4529/go.mod h1:xZnkP7mREFX5MORlOPEzLMr+90PPZQ2QWzrVTWfAq64= -google.golang.org/genproto v0.0.0-20230706204954-ccb25ca9f130 h1:Au6te5hbKUV8pIYWHqOUZ1pva5qK/rwbIhoXEUB9Lu8= -google.golang.org/genproto v0.0.0-20230706204954-ccb25ca9f130/go.mod h1:O9kGHb51iE/nOGvQaDUuadVYqovW56s5emA88lQnj6Y= -google.golang.org/genproto/googleapis/api v0.0.0-20230525234020-1aefcd67740a/go.mod h1:ts19tUU+Z0ZShN1y3aPyq2+O3d5FUNNgT6FtOzmrNn8= -google.golang.org/genproto/googleapis/api v0.0.0-20230525234035-dd9d682886f9/go.mod h1:vHYtlOoi6TsQ3Uk2yxR7NI5z8uoV+3pZtR4jmHIkRig= -google.golang.org/genproto/googleapis/api v0.0.0-20230526203410-71b5a4ffd15e/go.mod h1:vHYtlOoi6TsQ3Uk2yxR7NI5z8uoV+3pZtR4jmHIkRig= -google.golang.org/genproto/googleapis/api v0.0.0-20230530153820-e85fd2cbaebc/go.mod h1:vHYtlOoi6TsQ3Uk2yxR7NI5z8uoV+3pZtR4jmHIkRig= -google.golang.org/genproto/googleapis/api v0.0.0-20230629202037-9506855d4529/go.mod h1:vHYtlOoi6TsQ3Uk2yxR7NI5z8uoV+3pZtR4jmHIkRig= -google.golang.org/genproto/googleapis/api v0.0.0-20230724170836-66ad5b6ff146 h1:P60zJj7Yxq1VhZIxpRO7A5lDFyy07D6Dqa+HCixuFBM= 
-google.golang.org/genproto/googleapis/api v0.0.0-20230724170836-66ad5b6ff146/go.mod h1:rsr7RhLuwsDKL7RmgDDCUc6yaGr1iqceVb5Wv6f6YvQ= -google.golang.org/genproto/googleapis/bytestream v0.0.0-20230530153820-e85fd2cbaebc/go.mod h1:ylj+BE99M198VPbBh6A8d9n3w8fChvyLK3wwBOjXBFA= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230525234015-3fc162c6f38a/go.mod h1:xURIpW9ES5+/GZhnV6beoEtxQrnkRGIfP5VQG2tCBLc= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230525234030-28d5490b6b19/go.mod h1:66JfowdXAEgad5O9NnYcsNPLCPZJD++2L9X0PCMODrA= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230526203410-71b5a4ffd15e/go.mod h1:66JfowdXAEgad5O9NnYcsNPLCPZJD++2L9X0PCMODrA= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc/go.mod h1:66JfowdXAEgad5O9NnYcsNPLCPZJD++2L9X0PCMODrA= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230629202037-9506855d4529/go.mod h1:66JfowdXAEgad5O9NnYcsNPLCPZJD++2L9X0PCMODrA= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230706204954-ccb25ca9f130/go.mod h1:8mL13HKkDa+IuJ8yruA3ci0q+0vsUz4m//+ottjwS5o= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230724170836-66ad5b6ff146 h1:0PjALPu/U/4OVXKQM2P8b8NJGd4V+xbZSP+uuBJpGm0= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230724170836-66ad5b6ff146/go.mod h1:TUfxEVdsvPg18p6AslUXFoLdpED4oBnGwyqk3dV1XzM= -google.golang.org/grpc v1.33.2 h1:EQyQC3sa8M+p6Ulc8yy9SWSS2GVwyRc83gAbG8lrl4o= +google.golang.org/genproto v0.0.0-20221025140454-527a21cfbd71/go.mod h1:9qHF0xnpdSfF6knlcsnpzUu5y+rpwgbvsyGAZPBMg4s= +google.golang.org/genproto v0.0.0-20240102182953-50ed04b92917 h1:nz5NESFLZbJGPFxDT/HCn+V1mZ8JGNoY4nUpmW/Y2eg= +google.golang.org/genproto v0.0.0-20240102182953-50ed04b92917/go.mod h1:pZqR+glSb11aJ+JQcczCvgf47+duRuzNSKqE8YAQnV0= +google.golang.org/genproto/googleapis/api v0.0.0-20240102182953-50ed04b92917 h1:rcS6EyEaoCO52hQDupoSfrxI3R6C2Tq741is7X8OvnM= +google.golang.org/genproto/googleapis/api v0.0.0-20240102182953-50ed04b92917/go.mod 
h1:CmlNWB9lSezaYELKS5Ym1r44VrrbPUa7JTvw+6MbpJ0= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240108191215-35c7eff3a6b1 h1:gphdwh0npgs8elJ4T6J+DQJHPVF7RsuJHCfwztUb4J4= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240108191215-35c7eff3a6b1/go.mod h1:daQN87bsDqDoe316QbbvX60nMoJQa4r6Ds0ZuoAe5yA= +google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.22.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.32.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.1/go.mod 
h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= +google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= +google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.46.2/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.47.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.48.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.49.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= +google.golang.org/grpc v1.50.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= +google.golang.org/grpc v1.50.1/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= +google.golang.org/grpc v1.60.1 h1:26+wFr+cNqSGFcOXcabYC0lUVJVRa2Sb2ortSK7VrEU= +google.golang.org/grpc v1.60.1/go.mod 
h1:OlCHIeLYqSSsLi6i49B5QGdzaMZK9+M7LXN2FKz4eGM= +google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0 h1:M1YKkFIboKNieVO5DLUEVzQfGwJD30Nv2jfUgzb5UcE= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= @@ -2641,39 +1672,31 @@ google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2 google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= -google.golang.org/protobuf v1.25.1-0.20200805231151-a709e31e5d12/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.27.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -google.golang.org/protobuf v1.29.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8= -google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.32.0 
h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= +google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= -gopkg.in/cheggaaa/pb.v1 v1.0.28/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= +gopkg.in/cheggaaa/pb.v1 v1.0.27/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o= -gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/ini.v1 v1.62.0 h1:duBzk771uxoUuOlyRLkHsygud9+5lrlGjdFBb4mSKDU= -gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce/go.mod h1:5AcXVHNjg+BDxry382+8OKon8SEWiKktQR07RKPsv1c= -gopkg.in/olebedev/go-duktape.v3 v3.0.0-20200619000410-60c24ae608a6/go.mod h1:uAJfkITjFhyEEuUfm7bsmCZRbW5WRq8s9EY8HZ6hCns= +gopkg.in/ini.v1 v1.67.0 
h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= +gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= -gopkg.in/urfave/cli.v1 v1.20.0/go.mod h1:vuBzUtMdQeixQj8LVd+/98pzhxNGQoyuPBlsXHOQNO0= gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= @@ -2681,7 +1704,6 @@ gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.6/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= @@ -2690,7 +1712,10 @@ gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw= +gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo= +gotest.tools/v3 v3.0.2/go.mod 
h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk= +gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU= +honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= @@ -2698,65 +1723,10 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.1.3/go.mod h1:NgwopIslSNH47DimFoV78dnkksY2EFtX0ajyb3K/las= -honnef.co/go/tools v0.2.1/go.mod h1:lPVVZ2BS5TfnjLyizF7o7hv7j9/L+8cZY2hLyjP9cGY= -lukechampine.com/uint128 v1.1.1/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk= -lukechampine.com/uint128 v1.2.0/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk= -modernc.org/cc/v3 v3.36.0/go.mod h1:NFUHyPn4ekoC/JHeZFfZurN6ixxawE1BnVonP/oahEI= -modernc.org/cc/v3 v3.36.2/go.mod h1:NFUHyPn4ekoC/JHeZFfZurN6ixxawE1BnVonP/oahEI= -modernc.org/cc/v3 v3.36.3/go.mod h1:NFUHyPn4ekoC/JHeZFfZurN6ixxawE1BnVonP/oahEI= -modernc.org/cc/v3 v3.37.0/go.mod h1:vtL+3mdHx/wcj3iEGz84rQa8vEqR6XM84v5Lcvfph20= -modernc.org/cc/v3 v3.40.0/go.mod h1:/bTg4dnWkSXowUO6ssQKnOV0yMVxDYNIsIrzqTFDGH0= -modernc.org/ccgo/v3 v3.0.0-20220428102840-41399a37e894/go.mod h1:eI31LL8EwEBKPpNpA4bU1/i+sKOwOrQy8D87zWUcRZc= -modernc.org/ccgo/v3 v3.0.0-20220430103911-bc99d88307be/go.mod h1:bwdAnOoaIt8Ax9YdWGjxWsdkPcZyRPHqrOvJxaKAKGw= -modernc.org/ccgo/v3 v3.0.0-20220904174949-82d86e1b6d56/go.mod 
h1:YSXjPL62P2AMSxBphRHPn7IkzhVHqkvOnRKAKh+W6ZI= -modernc.org/ccgo/v3 v3.16.4/go.mod h1:tGtX0gE9Jn7hdZFeU88slbTh1UtCYKusWOoCJuvkWsQ= -modernc.org/ccgo/v3 v3.16.6/go.mod h1:tGtX0gE9Jn7hdZFeU88slbTh1UtCYKusWOoCJuvkWsQ= -modernc.org/ccgo/v3 v3.16.8/go.mod h1:zNjwkizS+fIFDrDjIAgBSCLkWbJuHF+ar3QRn+Z9aws= -modernc.org/ccgo/v3 v3.16.9/go.mod h1:zNMzC9A9xeNUepy6KuZBbugn3c0Mc9TeiJO4lgvkJDo= -modernc.org/ccgo/v3 v3.16.13-0.20221017192402-261537637ce8/go.mod h1:fUB3Vn0nVPReA+7IG7yZDfjv1TMWjhQP8gCxrFAtL5g= -modernc.org/ccgo/v3 v3.16.13/go.mod h1:2Quk+5YgpImhPjv2Qsob1DnZ/4som1lJTodubIcoUkY= -modernc.org/ccorpus v1.11.6/go.mod h1:2gEUTrWqdpH2pXsmTM1ZkjeSrUWDpjMu2T6m29L/ErQ= -modernc.org/httpfs v1.0.6/go.mod h1:7dosgurJGp0sPaRanU53W4xZYKh14wfzX420oZADeHM= -modernc.org/libc v0.0.0-20220428101251-2d5f3daf273b/go.mod h1:p7Mg4+koNjc8jkqwcoFBJx7tXkpj00G77X7A72jXPXA= -modernc.org/libc v1.16.0/go.mod h1:N4LD6DBE9cf+Dzf9buBlzVJndKr/iJHG97vGLHYnb5A= -modernc.org/libc v1.16.1/go.mod h1:JjJE0eu4yeK7tab2n4S1w8tlWd9MxXLRzheaRnAKymU= -modernc.org/libc v1.16.17/go.mod h1:hYIV5VZczAmGZAnG15Vdngn5HSF5cSkbvfz2B7GRuVU= -modernc.org/libc v1.16.19/go.mod h1:p7Mg4+koNjc8jkqwcoFBJx7tXkpj00G77X7A72jXPXA= -modernc.org/libc v1.17.0/go.mod h1:XsgLldpP4aWlPlsjqKRdHPqCxCjISdHfM/yeWC5GyW0= -modernc.org/libc v1.17.1/go.mod h1:FZ23b+8LjxZs7XtFMbSzL/EhPxNbfZbErxEHc7cbD9s= -modernc.org/libc v1.17.4/go.mod h1:WNg2ZH56rDEwdropAJeZPQkXmDwh+JCA1s/htl6r2fA= -modernc.org/libc v1.18.0/go.mod h1:vj6zehR5bfc98ipowQOM2nIDUZnVew/wNC/2tOGS+q0= -modernc.org/libc v1.20.3/go.mod h1:ZRfIaEkgrYgZDl6pa4W39HgN5G/yDW+NRmNKZBDFrk0= -modernc.org/libc v1.21.4/go.mod h1:przBsL5RDOZajTVslkugzLBj1evTue36jEomFQOoYuI= -modernc.org/libc v1.22.2/go.mod h1:uvQavJ1pZ0hIoC/jfqNoMLURIMhKzINIWypNM17puug= -modernc.org/mathutil v1.2.2/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= -modernc.org/mathutil v1.4.1/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= -modernc.org/mathutil v1.5.0/go.mod 
h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E= -modernc.org/memory v1.1.1/go.mod h1:/0wo5ibyrQiaoUoH7f9D8dnglAmILJ5/cxZlRECf+Nw= -modernc.org/memory v1.2.0/go.mod h1:/0wo5ibyrQiaoUoH7f9D8dnglAmILJ5/cxZlRECf+Nw= -modernc.org/memory v1.2.1/go.mod h1:PkUhL0Mugw21sHPeskwZW4D6VscE/GQJOnIpCnW6pSU= -modernc.org/memory v1.3.0/go.mod h1:PkUhL0Mugw21sHPeskwZW4D6VscE/GQJOnIpCnW6pSU= -modernc.org/memory v1.4.0/go.mod h1:PkUhL0Mugw21sHPeskwZW4D6VscE/GQJOnIpCnW6pSU= -modernc.org/memory v1.5.0/go.mod h1:PkUhL0Mugw21sHPeskwZW4D6VscE/GQJOnIpCnW6pSU= -modernc.org/opt v0.1.1/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0= -modernc.org/opt v0.1.3/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0= -modernc.org/sqlite v1.18.1/go.mod h1:6ho+Gow7oX5V+OiOQ6Tr4xeqbx13UZ6t+Fw9IRUG4d4= -modernc.org/sqlite v1.18.2/go.mod h1:kvrTLEWgxUcHa2GfHBQtanR1H9ht3hTJNtKpzH9k1u0= -modernc.org/strutil v1.1.1/go.mod h1:DE+MQQ/hjKBZS2zNInV5hhcipt5rLPWkmpbGeW5mmdw= -modernc.org/strutil v1.1.3/go.mod h1:MEHNA7PdEnEwLvspRMtWTNnp2nnyvMfkimT1NKNAGbw= -modernc.org/tcl v1.13.1/go.mod h1:XOLfOwzhkljL4itZkK6T72ckMgvj0BDsnKNdZVUOecw= -modernc.org/tcl v1.13.2/go.mod h1:7CLiGIPo1M8Rv1Mitpv5akc2+8fxUd2y2UzC/MfMzy0= -modernc.org/token v1.0.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM= -modernc.org/token v1.0.1/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM= -modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM= -modernc.org/z v1.5.1/go.mod h1:eWFB510QWW5Th9YGZT81s+LwvaAs3Q2yr4sP0rmLkv8= -mvdan.cc/gofumpt v0.1.1/go.mod h1:yXG1r1WqZVKWbVRtBWKWX9+CxGYfA51nSomhM0woR48= -mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed/go.mod h1:Xkxe497xwlCKkIaQYRfC7CSLworTXY9RMqwhhCm+8Nc= -mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b/go.mod h1:2odslEg/xrtNQqCYg2/jCoyKnw3vv5biOc3JnIcYfL4= -mvdan.cc/unparam v0.0.0-20210104141923-aac4ce9116a7/go.mod h1:hBpJkZE8H/sb+VRFvw2+rBpHNsTBcvSpk61hr8mzXZE= nhooyr.io/websocket v1.8.6 h1:s+C3xAMLwGmlI31Nyn/eAehUlZPwfYZu2JXM621Q5/k= 
nhooyr.io/websocket v1.8.6/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0= +pgregory.net/rapid v0.5.5 h1:jkgx1TjbQPD/feRoK+S/mXw9e1uj6WilpHrXJowi6oA= +pgregory.net/rapid v0.5.5/go.mod h1:PY5XlDGj0+V1FCq0o192FdRhpKHGTRIWBgqjDBTrq04= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= diff --git a/integration_tests/cli/auth-demo-hex.sh b/integration_tests/cli/auth-demo-hex.sh index 0c3127a78..9492284f5 100644 --- a/integration_tests/cli/auth-demo-hex.sh +++ b/integration_tests/cli/auth-demo-hex.sh @@ -44,6 +44,7 @@ pid=2577 echo "Jack proposes account for $user" result=$(echo $passphrase | dcld tx auth propose-add-account --info="Jack is proposing this account" --address="$user_address" --pubkey="$user_pubkey" --roles="Vendor" --vid="$vid_in_hex_format" --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -77,6 +78,7 @@ test_divider productName="Device #1" echo "$user adds Model with VID: $vid_in_hex_format PID: $pid" result=$(echo "test1234" | dcld tx model add-model --vid=$vid_in_hex_format --pid=$pid_in_hex_format --productName="$productName" --productLabel="Device Description" --commissioningCustomFlow=0 --deviceTypeID=12 --partNumber=12 --from=$user_address --yes) +result=$(get_txn_result "$result") check_response_and_report "$result" "\"code\": 0" test_divider @@ -85,12 +87,14 @@ vid_plus_one_in_hex_format=0xA14 vidPlusOne=$((vid_in_hex_format+1)) echo "$user adds Model with a VID: $vid_plus_one_in_hex_format PID: $pid_in_hex_format, This fails with Permission denied as the VID is not associated with this vendor account." 
result=$(echo "test1234" | dcld tx model add-model --vid=$vid_plus_one_in_hex_format --pid=$pid_in_hex_format --productName="$productName" --productLabel="Device Description" --commissioningCustomFlow=0 --deviceTypeID=12 --partNumber=12 --from=$user_address --yes 2>&1) || true +result=$(get_txn_result "$result") check_response_and_report "$result" "transaction should be signed by a vendor account containing the vendorID $vidPlusOne" test_divider echo "$user updates Model with VID: $vid_in_hex_format PID: $pid_in_hex_format" result=$(echo "test1234" | dcld tx model update-model --vid=$vid_in_hex_format --pid=$pid_in_hex_format --productName="$productName" --productLabel="Device Description" --partNumber=12 --from=$user_address --yes) +result=$(get_txn_result "$result") check_response_and_report "$result" "\"code\": 0" test_divider diff --git a/integration_tests/cli/auth-demo.sh b/integration_tests/cli/auth-demo.sh index 895d8b03c..c26b0c19e 100755 --- a/integration_tests/cli/auth-demo.sh +++ b/integration_tests/cli/auth-demo.sh @@ -84,6 +84,7 @@ test_divider echo "Jack proposes account for $user" result=$(echo $passphrase | dcld tx auth propose-add-account --info="Jack is proposing this account" --address="$user_address" --pubkey="$user_pubkey" --roles="NodeAdmin" --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -122,12 +123,14 @@ test_divider echo "Alice approves account for \"$user\"" result=$(echo $passphrase | dcld tx auth approve-add-account --address="$user_address" --info="Alice is approving this account" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Alice cannot reject account for \"$user\"" result=$(echo $passphrase | dcld tx auth reject-add-account --address="$user_address" --info="Alice is rejecting this account" --from alice --yes 2>&1 || true) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 
0" test_divider @@ -184,6 +187,7 @@ test_divider echo "Alice proposes to revoke account for $user" result=$(echo $passphrase | dcld tx auth propose-revoke-account --address="$user_address" --info="Alice proposes to revoke account" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" @@ -227,6 +231,7 @@ test_divider echo "Bob approves to revoke account for $user" result=$(echo $passphrase | dcld tx auth approve-revoke-account --address="$user_address" --from bob --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -279,6 +284,7 @@ test_divider echo "Jack proposes account for $user" result=$(echo $passphrase | dcld tx auth propose-add-account --info="Jack is proposing this account" --address="$user_address" --pubkey="$user_pubkey" --roles="NodeAdmin" --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -323,6 +329,7 @@ test_divider echo "Alice approves account for \"$user\"" result=$(echo $passphrase | dcld tx auth approve-add-account --address="$user_address" --info="Alice is approving this account" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -377,6 +384,7 @@ test_divider echo "Alice proposes to revoke account for $user" result=$(echo $passphrase | dcld tx auth propose-revoke-account --address="$user_address" --info="Alice proposes to revoke account" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" @@ -420,6 +428,7 @@ test_divider echo "Bob approves to revoke account for $user" result=$(echo $passphrase | dcld tx auth approve-revoke-account --address="$user_address" --from bob --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -462,6 +471,7 @@ test_divider echo "Jack proposes account for $user" result=$(echo $passphrase | dcld tx auth propose-add-account --info="Jack 
is proposing this account" --address="$user_address" --pubkey="$user_pubkey" --roles="NodeAdmin" --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -477,6 +487,7 @@ test_divider echo "Alice rejects account for \"$user\"" result=$(echo $passphrase | dcld tx auth reject-add-account --address="$user_address" --info="Alice is rejecting this account" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -541,12 +552,14 @@ test_divider echo "Bob rejects account for \"$user\"" result=$(echo $passphrase | dcld tx auth reject-add-account --address="$user_address" --info="Bob is rejecting this account" --from bob --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Bob cannot reject the same account \"$user\" for the second time" result=$(echo $passphrase | dcld tx auth reject-add-account --address="$user_address" --info="Bob is rejecting this account" --from bob --yes 2>&1 || true) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" test_divider @@ -642,6 +655,7 @@ test_divider echo "Jack proposes account for $user" result=$(echo $passphrase | dcld tx auth propose-add-account --info="Jack is proposing this account" --address="$user_address" --pubkey="$user_pubkey" --roles="Vendor,NodeAdmin" --vid=$vid --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -674,6 +688,7 @@ test_divider echo "Alice approves account for \"$user\"" result=$(echo $passphrase | dcld tx auth approve-add-account --address="$user_address" --info="Alice is approving this account" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -731,6 +746,7 @@ test_divider echo "Jack proposes account for $user" result=$(echo $passphrase | dcld tx auth propose-add-account --info="Jack is proposing this 
account" --address="$user_address" --pubkey="$user_pubkey" --roles="Vendor" --vid=$vid --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -786,6 +802,7 @@ test_divider echo "Jack proposes account for $user" result=$(echo $passphrase | dcld tx auth propose-add-account --info="Jack is proposing this account" --address="$user_address" --pubkey="$user_pubkey" --roles="Vendor" --vid=$vid --pid_ranges=$pid_ranges --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -881,10 +898,12 @@ test_divider echo "Jack proposes account for $new_trustee1" result=$(echo $passphrase | dcld tx auth propose-add-account --info="Jack is proposing this account" --address="$new_trustee_address1" --pubkey="$new_trustee_pubkey1" --roles="Trustee" --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Alice approves account for \"$new_trustee1\"" result=$(echo $passphrase | dcld tx auth approve-add-account --address="$new_trustee_address1" --info="Alice is approving this account" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -909,24 +928,29 @@ test_divider echo "Jack proposes account for $new_trustee2" result=$(echo $passphrase | dcld tx auth propose-add-account --info="Jack is proposing this account" --address="$new_trustee_address2" --pubkey="$new_trustee_pubkey2" --roles="Trustee" --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Alice approves account for \"$new_trustee2\"" result=$(echo $passphrase | dcld tx auth approve-add-account --address="$new_trustee_address2" --info="Alice is approving this account" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Jack can reject account for \"$user\" even if Jack already approved account" result=$(echo $passphrase | dcld tx 
auth reject-add-account --address="$new_trustee_address2" --info="Jack is rejecting this account" --from jack --yes 2>&1 || true) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Jack re-approves account for \"$user\"" result=$(echo $passphrase | dcld tx auth approve-add-account --address="$new_trustee_address2" --info="Jack is proposing this account" --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Bob approves account for \"$new_trustee2\"" result=$(echo $passphrase | dcld tx auth approve-add-account --address="$new_trustee_address2" --info="Bob is approving this account" --from bob --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -954,6 +978,7 @@ test_divider echo "Jack proposes account for $user" result=$(echo $passphrase | dcld tx auth propose-add-account --info="Jack is proposing this account" --address="$user_address" --pubkey="$user_pubkey" --roles="Vendor" --vid=$vid --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -986,6 +1011,7 @@ test_divider echo "Alice approves account for \"$user\"" result=$(echo $passphrase | dcld tx auth approve-add-account --address="$user_address" --info="Alice is approving this account" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -1034,6 +1060,7 @@ test_divider # REVOKE ACCOUNT, WE NEED 4 TRUSTEE'S APPROVALS, BECAUSE WE HAVE 5 TRUSTEES echo "Alice proposes to revoke account for $user" result=$(echo $passphrase | dcld tx auth propose-revoke-account --address="$user_address" --info="Alice proposes to revoke account" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -1048,6 +1075,7 @@ test_divider echo "Bob approves to revoke account for $user" result=$(echo $passphrase | dcld tx auth 
approve-revoke-account --address="$user_address" --from bob --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -1063,18 +1091,22 @@ test_divider # REMOVE TRUSTEE ACCOUNT echo "Alice proposes to revoke account for $new_trustee1" result=$(echo $passphrase | dcld tx auth propose-revoke-account --address="$new_trustee_address1" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Bob approves to revoke account for $new_trustee1" result=$(echo $passphrase | dcld tx auth approve-revoke-account --address="$new_trustee_address1" --from bob --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Jack approves to revoke account for $new_trustee1" result=$(echo $passphrase | dcld tx auth approve-revoke-account --address="$new_trustee_address1" --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$new_trustee1 approves to revoke account for $new_trustee1" result=$(echo $passphrase | dcld tx auth approve-revoke-account --address="$new_trustee_address1" --from $new_trustee1 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Get revoked account $new_trustee1" @@ -1087,6 +1119,7 @@ test_divider # REVOKE ACCOUNT echo "Jack approves to revoke account for $user" result=$(echo $passphrase | dcld tx auth approve-revoke-account --address="$user_address" --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -1103,6 +1136,7 @@ test_divider # REJECT A NEW ACCOUNT, WE NEED 3 TRUSTEE'S REJECTS, BECAUSE WE HAVE 4 TRUSTEES echo "Jack proposes account for $user" result=$(echo $passphrase | dcld tx auth propose-add-account --info="Jack is proposing this account" --address="$user_address" --pubkey="$user_pubkey" --roles="Vendor,NodeAdmin" --vid=$vid --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" 
"\"code\": 0" test_divider @@ -1118,6 +1152,7 @@ test_divider echo "Bob rejects account for \"$user\"" result=$(echo $passphrase | dcld tx auth reject-add-account --address="$user_address" --info="Bob is rejecting this account" --from bob --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -1135,14 +1170,17 @@ test_divider # WE REMOVE TRUSTEE ACCOUNT echo "Alice proposes to revoke account for $new_trustee2" result=$(echo $passphrase | dcld tx auth propose-revoke-account --address="$new_trustee_address2" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Bob approves to revoke account for $new_trustee2" result=$(echo $passphrase | dcld tx auth approve-revoke-account --address="$new_trustee_address2" --from bob --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Jack approves to revoke account for $new_trustee2" result=$(echo $passphrase | dcld tx auth approve-revoke-account --address="$new_trustee_address2" --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Get revoked account $new_trustee2" @@ -1155,6 +1193,7 @@ test_divider # REJECT A NEW ACCOUNT echo "Alice rejects account for \"$user\"" result=$(echo $passphrase | dcld tx auth reject-add-account --address="$user_address" --info="Alice is rejecting this account" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -1194,10 +1233,12 @@ new_trustee_pubkey=$(echo $passphrase | dcld keys show $new_trustee -p) echo "Jack proposes account for $new_trustee" result=$(echo $passphrase | dcld tx auth propose-add-account --info="Jack is proposing this account" --address="$new_trustee_address" --pubkey="$new_trustee_pubkey" --roles="Trustee" --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Alice approves account for \"$new_trustee\"" 
result=$(echo $passphrase | dcld tx auth approve-add-account --address="$new_trustee_address" --info="Alice is approving this account" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -1205,6 +1246,7 @@ test_divider # ADD A NEW ACCOUNT, WE NEED 3 TRUSTEE'S APPROVALS, BECAUSE WE HAVE 4 TRUSTEES echo "Jack proposes account for $user" result=$(echo $passphrase | dcld tx auth propose-add-account --info="Jack is proposing this account" --address="$user_address" --pubkey="$user_pubkey" --roles="Vendor,NodeAdmin" --vid=$vid --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -1219,6 +1261,7 @@ test_divider echo "Bob approves account for \"$new_trustee\"" result=$(echo $passphrase | dcld tx auth approve-add-account --info="Bob is approving this account" --address="$user_address" --from bob --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -1236,14 +1279,17 @@ test_divider # WE REMOVE TRUSTEE ACCOUNT echo "Alice proposes to revoke account for $new_trustee" result=$(echo $passphrase | dcld tx auth propose-revoke-account --address="$new_trustee_address" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Bob approves to revoke account for $new_trustee" result=$(echo $passphrase | dcld tx auth approve-revoke-account --address="$new_trustee_address" --from bob --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Jack approves to revoke account for $new_trustee" result=$(echo $passphrase | dcld tx auth approve-revoke-account --address="$new_trustee_address" --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Get revoked account $new_trustee" @@ -1256,6 +1302,7 @@ test_divider # ADD A NEW ACCOUNT echo "Alice approves account for \"$user\"" result=$(echo $passphrase | dcld tx auth 
approve-add-account --address="$user_address" --info="Alice is approving this account" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -1289,6 +1336,7 @@ test_divider productName="Device #1" echo "$user adds Model with VID: $vid PID: $pid" result=$(echo "test1234" | dcld tx model add-model --vid=$vid --pid=$pid --productName="$productName" --productLabel="Device Description" --commissioningCustomFlow=0 --deviceTypeID=12 --partNumber=12 --from=$user_address --yes) +result=$(get_txn_result "$result") check_response_and_report "$result" "\"code\": 0" test_divider @@ -1296,11 +1344,13 @@ test_divider vidPlusOne=$((vid+1)) echo "$user adds Model with a VID: $vidPlusOne PID: $pid, This fails with Permission denied as the VID is not associated with this vendor account." result=$(echo "test1234" | dcld tx model add-model --vid=$vidPlusOne --pid=$pid --productName="$productName" --productLabel="Device Description" --commissioningCustomFlow=0 --deviceTypeID=12 --partNumber=12 --from=$user_address --yes 2>&1) || true +result=$(get_txn_result "$result") check_response_and_report "$result" "transaction should be signed by a vendor account containing the vendorID $vidPlusOne" test_divider echo "$user updates Model with VID: $vid PID: $pid" result=$(echo "test1234" | dcld tx model update-model --vid=$vid --pid=$pid --productName="$productName" --productLabel="Device Description" --partNumber=12 --from=$user_address --yes) +result=$(get_txn_result "$result") check_response_and_report "$result" "\"code\": 0" test_divider @@ -1332,10 +1382,12 @@ user_pubkey=$(echo $passphrase | dcld keys show $user -p) echo "jack (Trustee) propose account" result=$(dcld tx auth propose-add-account --info="Jack is proposing this account" --address="$user_address" --pubkey="$user_pubkey" --roles="NodeAdmin" --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "jack (Trustee) rejects account" 
result=$(dcld tx auth reject-add-account --address="$user_address" --info="Jack is rejecting this account" --from jack --yes 2>&1 || true) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider diff --git a/integration_tests/cli/common.sh b/integration_tests/cli/common.sh index 44b2a4c94..04c6cc18d 100755 --- a/integration_tests/cli/common.sh +++ b/integration_tests/cli/common.sh @@ -122,11 +122,13 @@ create_new_account(){ echo "Jack proposes account for \"$name\" with roles: \"$roles\"" result=$(echo $passphrase | dcld tx auth propose-add-account --address="$address" --pubkey="$pubkey" --roles=$roles --from jack --yes) + result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" echo "Alice approves account for \"$name\" with roles: \"$roles\"" result=$(echo $passphrase | dcld tx auth approve-add-account --address="$address" --from alice --yes) + result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" } @@ -149,8 +151,9 @@ create_new_vendor_account(){ echo "Jack proposes account for \"$_name\" with Vendor role" _result=$(echo $passphrase | dcld tx auth propose-add-account --address="$_address" --pubkey="$_pubkey" --roles=Vendor --vid=$_vid --from jack --yes) fi - check_response "$_result" "\"code\": 0" + _result=$(get_txn_result "$_result") + check_response "$_result" "\"code\": 0" } create_model_and_version() { @@ -160,8 +163,10 @@ create_model_and_version() { local _softwareVersionString="$4" local _user_address="$5" result=$(echo "$passphrase" | dcld tx model add-model --vid=$_vid --pid=$_pid --deviceTypeID=1 --productName=TestProduct --productLabel=TestingProductLabel --partNumber=1 --commissioningCustomFlow=0 --from=$_user_address --yes) + result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" result=$(echo "$passphrase" | dcld tx model add-model-version --cdVersionNumber=1 --maxApplicableSoftwareVersion=10 --minApplicableSoftwareVersion=1 
--vid=$_vid --pid=$_pid --softwareVersion=$_softwareVersion --softwareVersionString=$_softwareVersionString --from=$_user_address --yes) + result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" } @@ -224,12 +229,13 @@ wait_for_height() { # TODO: see https://github.com/zigbee-alliance/distributed-compliance-ledger/issues/203 execute_with_retry() { local _command=${1} - + local _error=${2:-"EOF"} local _result=$($_command) for i in {1..10}; do - if [[ "$(_check_response "$_result" "EOF" "raw")" == true ]]; then + if [[ "$(_check_response "$_result" "$_error" "raw")" == true ]]; then #echo "EOF detected, re-trying" + sleep 2 _result=$($_command) else break @@ -238,3 +244,24 @@ execute_with_retry() { echo "$_result" } + + +get_txn_result() { + local _broadcast_result=${1} + # Remove parts before the first `{` + _broadcast_result=$(echo "$_broadcast_result" | sed -n 's/^[^{]*{\(.*\)/{\1/p') + local _txHash=$(echo "$_broadcast_result" | jq -r '.txhash') + local _command="dcld query tx $_txHash" + local _result=$($_command 2>&1) + + for i in {1..20}; do + if [[ "$(_check_response "$_result" "not found" "raw")" == true ]]; then + sleep 2 + _result=$($_command 2>&1) + else + break + fi + done + + echo "$_result" +} diff --git a/integration_tests/cli/compliance-demo-hex.sh b/integration_tests/cli/compliance-demo-hex.sh index 04fd97ea7..a2ad30958 100644 --- a/integration_tests/cli/compliance-demo-hex.sh +++ b/integration_tests/cli/compliance-demo-hex.sh @@ -48,6 +48,7 @@ cd_certificate_id="some ID" echo "Add Model with VID: $vid_in_hex_format PID: $pid_in_hex_format" result=$(echo "$passphrase" | dcld tx model add-model --vid=$vid_in_hex_format --pid=$pid_in_hex_format --deviceTypeID=1 --productName=TestProduct --productLabel=TestingProductLabel --partNumber=1 --commissioningCustomFlow=0 --from $vendor_account --yes) +result=$(get_txn_result "$result") echo $result check_response "$result" "\"code\": 0" @@ -55,6 +56,7 @@ test_divider echo "Certify unknown 
Model Version with VID: $vid_in_hex_format PID: $pid_in_hex_format SV: ${sv} with zigbee certification" result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid_in_hex_format --pid=$pid_in_hex_format --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$zigbee_certification_type" --certificationDate="$certification_date" --cdCertificateId="$cd_certificate_id" --from $zb_account --yes) +result=$(get_txn_result "$result") echo "$result" check_response "$result" "\"code\": 517" check_response "$result" "No model version" @@ -63,6 +65,7 @@ test_divider echo "Add Model Version with VID: $vid_in_hex_format PID: $pid_in_hex_format SV: $sv SoftwareVersionString:$svs" result=$(echo '$passphrase' | dcld tx model add-model-version --cdVersionNumber=1 --maxApplicableSoftwareVersion=10 --minApplicableSoftwareVersion=1 --vid=$vid_in_hex_format --pid=$pid_in_hex_format --softwareVersion=$sv --softwareVersionString=$svs --from=$vendor_account --yes) +result=$(get_txn_result "$result") echo $result check_response "$result" "\"code\": 0" @@ -116,6 +119,7 @@ test_divider invalid_svs=$RANDOM echo "Certify Model with VID: $vid_in_hex_format PID: $pid_in_hex_format SV: ${sv} with zigbee certification and invalid SoftwareVersionString: $invalid_svs" result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid_in_hex_format --pid=$pid_in_hex_format --softwareVersion=$sv --softwareVersionString=$invalid_svs --certificationType="$zigbee_certification_type" --certificationDate="$certification_date" --cdCertificateId="$cd_certificate_id" --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 306" # check_response "$result" "failed to execute message; message index: 0: Model with vid=$vid, pid=$pid, softwareVersion=$sv present on the ledger does not have matching softwareVersionString=$invalid_svs: model version does not match" @@ -123,6 +127,7 @@ test_divider echo "Certify Model with VID: 
$vid_in_hex_format PID: $pid_in_hex_format SV: ${sv} with zigbee certification" result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid_in_hex_format --pid=$pid_in_hex_format --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$zigbee_certification_type" --certificationDate="$certification_date" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=1 --from $zb_account --yes) +result=$(get_txn_result "$result") echo "$result" check_response "$result" "\"code\": 0" @@ -131,6 +136,7 @@ test_divider echo "Certify Model with VID: $vid_in_hex_format PID: $pid_in_hex_format SV: ${sv} with matter certification" echo "dcld tx compliance certify-model --vid=$vid_in_hex_format --pid=$pid_in_hex_format --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$matter_certification_type" --certificationDate="$certification_date" --from $zb_account --yes" result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid_in_hex_format --pid=$pid_in_hex_format --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$matter_certification_type" --certificationDate="$certification_date" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=1 --from $zb_account --yes) +result=$(get_txn_result "$result") echo "$result" check_response "$result" "\"code\": 0" @@ -139,6 +145,7 @@ test_divider echo "ReCertify Model with VID: $vid_in_hex_format PID: $pid_in_hex_format SV: ${sv} by different account" zigbee_certification_type="zigbee" result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid_in_hex_format --pid=$pid_in_hex_format --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$zigbee_certification_type" --certificationDate="$certification_date" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=1 --from $second_zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 303" check_response "$result" "already certified on the ledger" 
echo "$result" @@ -148,6 +155,7 @@ test_divider echo "ReCertify Model with VID: $vid_in_hex_format PID: $pid_in_hex_format SV: ${sv} by same account" zigbee_certification_type="zigbee" result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid_in_hex_format --pid=$pid_in_hex_format --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$zigbee_certification_type" --certificationDate="$certification_date" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=1 --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 303" check_response "$result" "already certified on the ledger" echo "$result" @@ -249,6 +257,7 @@ revocation_reason="some reason" echo "Revoke Certification for Model with VID: $vid_in_hex_format PID: $pid_in_hex_format SV: ${sv} from the past" revocation_date_past="2020-01-01T00:00:00Z" result=$(echo "$passphrase" | dcld tx compliance revoke-model --vid=$vid_in_hex_format --pid=$pid_in_hex_format --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$zigbee_certification_type" --revocationDate="$revocation_date_past" --reason "$revocation_reason" --cdVersionNumber=1 --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 302" check_response "$result" "must be after" echo "$result" @@ -259,6 +268,7 @@ test_divider echo "Revoke Certification for Model with VID: $vid_in_hex_format PID: $pid_in_hex_format SV: ${sv} " revocation_date="2020-02-02T02:20:20Z" result=$(echo "$passphrase" | dcld tx compliance revoke-model --vid=$vid_in_hex_format --pid=$pid_in_hex_format --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$zigbee_certification_type" --revocationDate="$revocation_date" --reason "$revocation_reason" --cdVersionNumber=1 --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -314,6 +324,7 @@ test_divider echo "Again Certify Model 
with VID: $vid_in_hex_format PID: $pid_in_hex_format SV: ${sv}" certification_date="2020-03-03T00:00:00Z" result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid_in_hex_format --pid=$pid_in_hex_format --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$zigbee_certification_type" --certificationDate="$certification_date" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=1 --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" diff --git a/integration_tests/cli/compliance-demo.sh b/integration_tests/cli/compliance-demo.sh index a232b8a0f..2aa594602 100755 --- a/integration_tests/cli/compliance-demo.sh +++ b/integration_tests/cli/compliance-demo.sh @@ -48,6 +48,7 @@ schema_version_0=0 schema_version_2=2 echo "Certify unknown Model with VID: $vid PID: $pid SV: ${sv} with zigbee certification" result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$zigbee_certification_type" --certificationDate="$certification_date" --cdCertificateId="$cd_certificate_id" --from $zb_account --yes) +result=$(get_txn_result "$result") echo "$result" check_response "$result" "\"code\": 517" check_response "$result" "No model version" @@ -56,6 +57,7 @@ test_divider echo "Add Model with VID: $vid PID: $pid" result=$(echo "$passphrase" | dcld tx model add-model --vid=$vid --pid=$pid --deviceTypeID=1 --productName=TestProduct --productLabel=TestingProductLabel --partNumber=1 --commissioningCustomFlow=0 --from $vendor_account --yes) +result=$(get_txn_result "$result") echo $result check_response "$result" "\"code\": 0" @@ -63,6 +65,7 @@ test_divider echo "Certify unknown Model Version with VID: $vid PID: $pid SV: ${sv} with zigbee certification" result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs 
--certificationType="$zigbee_certification_type" --certificationDate="$certification_date" --cdCertificateId="$cd_certificate_id" --from $zb_account --yes) +result=$(get_txn_result "$result") echo "$result" check_response "$result" "\"code\": 517" check_response "$result" "No model version" @@ -71,6 +74,7 @@ test_divider echo "Add Model Version with VID: $vid PID: $pid SV: $sv SoftwareVersionString:$svs" result=$(echo '$passphrase' | dcld tx model add-model-version --cdVersionNumber=$cd_version_number --maxApplicableSoftwareVersion=10 --minApplicableSoftwareVersion=1 --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --from=$vendor_account --yes) +result=$(get_txn_result "$result") echo $result check_response "$result" "\"code\": 0" @@ -169,6 +173,7 @@ test_divider invalid_svs=$RANDOM echo "Certify Model with VID: $vid PID: $pid SV: ${sv} with zigbee certification and invalid SoftwareVersionString: $invalid_svs" result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$invalid_svs --certificationType="$zigbee_certification_type" --certificationDate="$certification_date" --cdCertificateId="$cd_certificate_id" --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 306" # check_response "$result" "failed to execute message; message index: 0: Model with vid=$vid, pid=$pid, softwareVersion=$svs present on the ledger does not have matching softwareVersionString=$invalid_svs: model version does not match" @@ -176,6 +181,7 @@ test_divider echo "Certify Model with VID: $vid PID: $pid SV: ${sv} with zigbee certification and invalid CDVersionNumber" result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$zigbee_certification_type" --certificationDate="$certification_date" --cdCertificateId="$cd_certificate_id" --from $zb_account --yes) 
+result=$(get_txn_result "$result") echo "$result" check_response "$result" "\"code\": 306" check_response "$result" "ledger does not have matching CDVersionNumber=0: model version does not match" @@ -183,7 +189,8 @@ check_response "$result" "ledger does not have matching CDVersionNumber=0: model test_divider echo "Certify Model with VID: $vid PID: $pid SV: ${sv} with zigbee certification" -result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$zigbee_certification_type" --certificationDate="$certification_date" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=$cd_version_number --schemaVersion=$schema_version_2 --from $zb_account --yes) +result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$zigbee_certification_type" --certificationDate="$certification_date" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=$cd_version_number --schemaVersion=$schema_version_2 --from $zb_account --yes) +result=$(get_txn_result "$result") echo "$result" check_response "$result" "\"code\": 0" @@ -192,6 +199,7 @@ test_divider echo "Certify Model with VID: $vid PID: $pid SV: ${sv} with matter certification" echo "dcld tx compliance certify-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$matter_certification_type" --certificationDate="$certification_date" --from $zb_account --yes" result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$matter_certification_type" --certificationDate="$certification_date" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=$cd_version_number --from $zb_account --yes) +result=$(get_txn_result "$result") echo "$result" check_response "$result" "\"code\": 0" @@ -200,6 +208,7 @@ 
test_divider echo "ReCertify Model with VID: $vid PID: $pid SV: ${sv} by different account" zigbee_certification_type="zigbee" result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$zigbee_certification_type" --certificationDate="$certification_date" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=$cd_version_number --from $second_zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 303" check_response "$result" "already certified on the ledger" echo "$result" @@ -209,6 +218,7 @@ test_divider echo "ReCertify Model with VID: $vid PID: $pid SV: ${sv} by same account" zigbee_certification_type="zigbee" result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$zigbee_certification_type" --certificationDate="$certification_date" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=$cd_version_number --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 303" check_response "$result" "already certified on the ledger" echo "$result" @@ -363,6 +373,7 @@ revocation_reason="some reason" echo "Revoke Certification for Model with VID: $vid PID: $pid SV: ${sv} from the past" revocation_date_past="2020-01-01T00:00:00Z" result=$(echo "$passphrase" | dcld tx compliance revoke-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$zigbee_certification_type" --revocationDate="$revocation_date_past" --reason "$revocation_reason" --cdVersionNumber=$cd_version_number --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 302" check_response "$result" "must be after" echo "$result" @@ -373,6 +384,7 @@ test_divider echo "Revoke Certification for Model with VID: $vid PID: $pid SV: ${sv} " 
revocation_date="2020-02-02T02:20:20Z" result=$(echo "$passphrase" | dcld tx compliance revoke-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$zigbee_certification_type" --revocationDate="$revocation_date" --reason "$revocation_reason" --cdVersionNumber=$cd_version_number --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -449,6 +461,7 @@ test_divider echo "Again Certify Model with VID: $vid PID: $pid SV: ${sv}" certification_date="2020-03-03T00:00:00Z" result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$zigbee_certification_type" --certificationDate="$certification_date" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=$cd_version_number --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -539,6 +552,7 @@ svs=$RANDOM # ADD MODEL echo "Add Model with VID: $vid PID: $pid" result=$(echo "$passphrase" | dcld tx model add-model --vid=$vid --pid=$pid --deviceTypeID=1 --productName=TestProduct --productLabel=TestingProductLabel --partNumber=1 --commissioningCustomFlow=0 --from $vendor_account --yes) +result=$(get_txn_result "$result") echo $result check_response "$result" "\"code\": 0" @@ -547,6 +561,7 @@ test_divider # ADD MODEL VERSION echo "Add Model Version with VID: $vid PID: $pid SV: $sv SoftwareVersionString:$svs" result=$(echo '$passphrase' | dcld tx model add-model-version --cdVersionNumber=$cd_version_number --maxApplicableSoftwareVersion=10 --minApplicableSoftwareVersion=1 --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --from=$vendor_account --yes) +result=$(get_txn_result "$result") echo $result check_response "$result" "\"code\": 0" @@ -555,6 +570,7 @@ test_divider # ADD CERTIFY MODEL WITH ALL OPTIONAL FIELDS echo "Certify Model with 
VID: $vid PID: $pid SV: ${sv} with zigbee certification" result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$zigbee_certification_type" --certificationDate="$certification_date" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=$cd_version_number --programTypeVersion="1.0" --familyId="someFID" --supportedClusters="someClusters" --compliantPlatformUsed="WIFI" --compliantPlatformVersion="V1" --OSVersion="someV" --certificationRoute="Full" --programType="pType" --transport="someTransport" --parentChild="parent" --certificationIDOfSoftwareComponent="someIDOfSoftwareComponent" --from $zb_account --yes) +result=$(get_txn_result "$result") echo "$result" check_response "$result" "\"code\": 0" @@ -627,6 +643,7 @@ new_transport="new_transport" # UPDATE COMPLIANCE INFO BY CERTIFICATION CENTER ACCOUNT echo "Update Compliance Info for Model with VID: ${vid} PID: ${pid} SV: ${sv} for $zigbee_certification_type with some optional fields set" result=$(echo "$passphrase" | dcld tx compliance update-compliance-info --vid=$vid --pid=$pid --softwareVersion=$sv --certificationType=$zigbee_certification_type --reason=$new_reason --programType=$new_program_type --parentChild=$new_parent_child --transport=$new_transport --from=$zb_account --yes) +result=$(get_txn_result "$result") echo "$result" result=$(dcld query compliance compliance-info --vid=$vid --pid=$pid --softwareVersion=$sv --certificationType=$zigbee_certification_type) @@ -660,6 +677,7 @@ test_divider # UPDATE COMPLIANCE INFO BY *NON CERTIFICATION CENTER ACCOUNT echo "Update Compliance Info for Model with VID: ${vid} PID: ${pid} SV: ${sv} for $zigbee_certification_type by non Certification Center account" result=$(echo "$passphrase" | dcld tx compliance update-compliance-info --vid=$vid --pid=$pid --softwareVersion=$sv --certificationType=$zigbee_certification_type --reason=$by_vendor_reason 
--programType=$by_vendor_program_type --parentChild=$by_vendor_parent_child --transport=$by_vendor_transport --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "unauthorized" echo "Compliance Info for Model with VID: ${vid} PID: ${pid} SV: ${sv} for $zigbee_certification_type is not updated by non CertificationCenter account" @@ -689,6 +707,7 @@ test_divider # UPDATE COMPLIANCE INFO WITH NO OPTIONAL FIELDS SET echo "Update Compliance Info for Model with VID: ${vid} PID: ${pid} SV: ${sv} for $zigbee_certification_type with no optional fields set" result=$(echo "$passphrase" | dcld tx compliance update-compliance-info --vid=$vid --pid=$pid --softwareVersion=$sv --certificationType=$zigbee_certification_type --from=$zb_account --yes) +result=$(get_txn_result "$result") echo "$result" echo "Compliance Info for Model with VID: ${vid} PID: ${pid} SV: ${sv} for $zigbee_certification_type is not updated" @@ -735,6 +754,7 @@ schema_version_3=3 echo "Update Compliance Info for Model with VID: ${vid} PID: ${pid} SV: ${sv} for $zigbee_certification_type with all optional fields set" result=$(echo "$passphrase" | dcld tx compliance update-compliance-info --vid=$vid --pid=$pid --softwareVersion=$sv --certificationType=$zigbee_certification_type --cdVersionNumber=$upd_cd_version_number --certificationDate=$upd_certification_date --reason=$upd_reason --cdCertificateId=$upd_cd_certificate_id --certificationRoute=$upd_certification_route --programType=$upd_program_type --programTypeVersion=$upd_program_type_version --compliantPlatformUsed=$upd_compliant_platform_used --compliantPlatformVersion=$upd_compliant_platform_version --transport=$upd_transport --familyId=$upd_familyID --supportedClusters=$upd_supported_clusters --OSVersion=$upd_os_version --parentChild=$upd_parent_child --certificationIDOfSoftwareComponent=$upd_certification_id_of_software_component --schemaVersion=$schema_version_3 --from=$zb_account --yes) +result=$(get_txn_result 
"$result") echo "$result" echo "Compliance Info for Model with VID: ${vid} PID: ${pid} SV: ${sv} for $zigbee_certification_type all fields updated" @@ -772,6 +792,7 @@ check_response "$result" "\"pid\": $pid" # delete compliance info echo "delete compliance info vid=$vid pid=$pid softwareVerion=$sv certificationType=$zigbee_certification_type" result=$(echo "$passphrase" | dcld tx compliance delete-compliance-info --vid=$vid --pid=$pid --softwareVersion=$sv --certificationType=$zigbee_certification_type --from=$zb_account --yes) +result=$(get_txn_result "$result") test_divider diff --git a/integration_tests/cli/compliance-provisioning.sh b/integration_tests/cli/compliance-provisioning.sh index 527bccf96..1f3375053 100755 --- a/integration_tests/cli/compliance-provisioning.sh +++ b/integration_tests/cli/compliance-provisioning.sh @@ -49,7 +49,8 @@ echo "Add Model and a New Model Version with VID: $vid PID: $pid SV: $sv" create_model_and_version $vid $pid $sv $svs $vendor_account echo "Provision for uncertificate Model with VID: $vid PID: $pid for ZB" -result=$(echo "$passphrase" | dcld tx compliance provision-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$certification_type_zb" --provisionalDate="$provision_date" --reason "$provision_reason" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=1 --schemaVersion=$schema_version_2 --from $zb_account --yes) +result=$(echo "$passphrase" | dcld tx compliance provision-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$certification_type_zb" --provisionalDate="$provision_date" --reason "$provision_reason" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=1 --schemaVersion=$schema_version_2 --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -57,6 +58,7 @@ test_divider echo "Provision for uncertificate Model with VID: $vid PID: $pid for Matter" 
result=$(echo "$passphrase" | dcld tx compliance provision-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$certification_type_matter" --provisionalDate="$provision_date" --reason "$provision_reason" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=1 --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -65,6 +67,7 @@ test_divider echo "ReProvision Model with VID: $vid PID: $pid SV: ${sv} by different account" result=$(echo "$passphrase" | dcld tx compliance provision-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$certification_type_zb" --provisionalDate="$provision_date" --reason "$provision_reason" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=1 --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 305" check_response "$result" "already in provisional state" echo "$result" @@ -73,6 +76,7 @@ test_divider echo "ReProvision Model with VID: $vid PID: $pid SV: ${sv} by same account" result=$(echo "$passphrase" | dcld tx compliance provision-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$certification_type_zb" --provisionalDate="$provision_date" --reason "$provision_reason" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=1 --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 305" check_response "$result" "already in provisional state" echo "$result" @@ -234,6 +238,7 @@ echo "Certify Model with VID: $vid PID: $pid2 for Matter" certification_date="2021-02-02T02:20:19Z" certification_reason="some reason 2" result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid --pid=$pid2 --softwareVersion=$sv2 --softwareVersionString=$svs2 --certificationType="$certification_type_matter" --certificationDate="$certification_date" 
--reason "$certification_reason" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=1 --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -254,6 +259,7 @@ test_divider echo "Provision for already certified Model with VID: $vid PID: $pid2 for Matter" result=$(echo "$passphrase" | dcld tx compliance provision-model --vid=$vid --pid=$pid2 --softwareVersion=$sv2 --softwareVersionString=$svs2 --certificationType="$certification_type_matter" --provisionalDate="$provision_date" --reason "$provision_reason" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=1 --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 303" check_response "$result" "already certified on the ledger" echo "$result" @@ -273,6 +279,7 @@ echo "Revoke Certification for uncertificate Model with VID: $vid PID: $pid3" revocation_date="2021-02-02T02:20:20Z" revocation_reason="some reason 11" result=$(echo "$passphrase" | dcld tx compliance revoke-model --vid=$vid --pid=$pid3 --softwareVersion=$sv3 --softwareVersionString=$svs3 --certificationType="$certification_type_zb" --revocationDate="$revocation_date" --reason "$revocation_reason" --cdVersionNumber=1 --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -280,6 +287,7 @@ test_divider echo "Provision for already revoked Model with VID: $vid PID: $pid3 for Matter" result=$(echo "$passphrase" | dcld tx compliance provision-model --vid=$vid --pid=$pid3 --softwareVersion=$sv3 --softwareVersionString=$svs3 --certificationType="$certification_type_zb" --provisionalDate="$provision_date" --reason "$provision_reason" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=1 --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 304" check_response "$result" "already revoked on the ledger" echo "$result" @@ -292,6 +300,7 @@ echo 
"Certify provisioned Model with VID: $vid PID: $pid" certification_date="2021-02-02T02:20:19Z" certification_reason="some reason 2" result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$certification_type_zb" --certificationDate="$certification_date" --reason "$certification_reason" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=1 --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -301,6 +310,7 @@ echo "Revoke provisioned Model with VID: $vid PID: $pid" revocation_date="2021-02-02T02:20:20Z" revocation_reason="some reason 22" result=$(echo "$passphrase" | dcld tx compliance revoke-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$certification_type_matter" --revocationDate="$revocation_date" --reason "$revocation_reason" --cdVersionNumber=1 --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -466,6 +476,7 @@ create_model_and_version $vid $pid $sv $svs $vendor_account # ADD PROVISION MODEL WITH ALL OPTIONAL FIELDS echo "Provision Model with VID: $vid PID: $pid SV: ${sv} with zigbee certification" result=$(echo "$passphrase" | dcld tx compliance provision-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$certification_type_zb" --provisionalDate="$provision_date" --reason "$provision_reason" --cdCertificateId="$cd_certificate_id" --programTypeVersion="1.0" --familyId="someFID" --supportedClusters="someClusters" --compliantPlatformUsed="WIFI" --compliantPlatformVersion="V1" --OSVersion="someV" --certificationRoute="Full" --programType="pType" --transport="someTransport" --parentChild="parent" --certificationIDOfSoftwareComponent="someIDOfSoftwareComponent1" --cdVersionNumber=1 --from $zb_account --yes) 
+result=$(get_txn_result "$result") echo "$result" check_response "$result" "\"code\": 0" @@ -507,6 +518,7 @@ test_divider # ADD CERTIFY MODEL WITH SOME OPTIONAL FIELDS echo "Certify Model with VID: $vid PID: $pid SV: ${sv} with zigbee certification" result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$certification_type_zb" --cdVersionNumber=1 --certificationDate="$certification_date" --cdCertificateId="$cd_certificate_id" --programTypeVersion="2.0" --familyId="someFID2" --supportedClusters="someClusters2" --compliantPlatformUsed="ETHERNET" --compliantPlatformVersion="V2" --certificationIDOfSoftwareComponent="someIDOfSoftwareComponent2" --from $zb_account --yes) +result=$(get_txn_result "$result") echo "$result" check_response "$result" "\"code\": 0" diff --git a/integration_tests/cli/compliance-revocation.sh b/integration_tests/cli/compliance-revocation.sh index 10b0f8aa1..3c01d9d11 100755 --- a/integration_tests/cli/compliance-revocation.sh +++ b/integration_tests/cli/compliance-revocation.sh @@ -51,6 +51,7 @@ revocation_reason="some reason" schema_version_0=0 schema_version_2=2 result=$(echo "$passphrase" | dcld tx compliance revoke-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$certification_type" --revocationDate="$revocation_date" --reason "$revocation_reason" --cdVersionNumber=1 --schemaVersion=$schema_version_2 --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -58,6 +59,7 @@ test_divider echo "ReRevoke Model with VID: $vid PID: $pid SV: ${sv} by different account" result=$(echo "$passphrase" | dcld tx compliance revoke-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$certification_type" --revocationDate="$revocation_date" --from $second_zb_account --cdVersionNumber=1 --yes) 
+result=$(get_txn_result "$result") check_response "$result" "\"code\": 304" check_response "$result" "already revoked on the ledger" echo "$result" @@ -66,6 +68,7 @@ test_divider echo "ReRevoke Model with VID: $vid PID: $pid SV: ${sv} by same account" result=$(echo "$passphrase" | dcld tx compliance revoke-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$certification_type" --revocationDate="$revocation_date" --cdVersionNumber=1 --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 304" check_response "$result" "already revoked on the ledger" echo "$result" @@ -76,6 +79,7 @@ echo "Revoke Certification for uncertificate Model with VID: $vid PID: $pid for revocation_date="2020-02-02T02:20:20Z" revocation_reason="some reason" result=$(echo "$passphrase" | dcld tx compliance revoke-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$certification_type_matter" --revocationDate="$revocation_date" --reason "$revocation_reason" --cdVersionNumber=1 --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -206,6 +210,7 @@ certification_reason="some reason 2" echo "Certify revoked Model with VID: $vid PID: $pid from the past" certification_date_past="2020-02-02T02:20:19Z" result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$certification_type" --certificationDate="$certification_date_past" --reason "$certification_reason" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=1 --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 302" check_response "$result" "must be after" echo "$result" @@ -215,6 +220,7 @@ test_divider echo "Certify revoked Model with VID: $vid PID: $pid for ZB" certification_date="2020-02-02T02:20:21Z" 
result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="$certification_type" --certificationDate="$certification_date" --reason "$certification_reason" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=1 --from $zb_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" diff --git a/integration_tests/cli/model-demo-hex.sh b/integration_tests/cli/model-demo-hex.sh index c6b4d26a9..ea8e2103f 100644 --- a/integration_tests/cli/model-demo-hex.sh +++ b/integration_tests/cli/model-demo-hex.sh @@ -58,6 +58,7 @@ test_divider productLabel="Device #1" echo "Add Model with VID: $vid_in_hex_format PID: $pid_in_hex_format" result=$(echo "test1234" | dcld tx model add-model --vid=$vid_in_hex_format --pid=$pid_in_hex_format --deviceTypeID=1 --productName=TestProduct --productLabel="$productLabel" --partNumber=1 --commissioningCustomFlow=0 --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -90,6 +91,7 @@ test_divider echo "Update Model with VID: ${vid_in_hex_format} PID: ${pid_in_hex_format} with new description" description="New Device Description" result=$(echo "test1234" | dcld tx model update-model --vid=$vid_in_hex_format --pid=$pid_in_hex_format --from $vendor_account --yes --productLabel "$description") +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -107,6 +109,7 @@ test_divider echo "Update Model with VID: ${vid_in_hex_format} PID: ${pid_in_hex_format} modifying supportURL" supportURL="https://newsupporturl.test" result=$(echo "test1234" | dcld tx model update-model --vid=$vid_in_hex_format --pid=$pid_in_hex_format --from $vendor_account --yes --supportURL "$supportURL") +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -123,6 +126,7 @@ test_divider echo 
"Delete Model with VID: ${vid_in_hex_format} PID: ${pid_in_hex_format}" result=$(dcld tx model delete-model --vid=$vid_in_hex_format --pid=$pid_in_hex_format --from=$vendor_account --yes) +result=$(get_txn_result "$result") echo "$result" test_divider diff --git a/integration_tests/cli/model-demo.sh b/integration_tests/cli/model-demo.sh index 42ad404f9..e8f60030e 100755 --- a/integration_tests/cli/model-demo.sh +++ b/integration_tests/cli/model-demo.sh @@ -63,6 +63,7 @@ schema_version_2=2 commissionerRemoteUiFlowURL="https://commissionerRemoteUiFlowURL.dclmodel" echo "Add Model with VID: $vid PID: $pid" result=$(echo "test1234" | dcld tx model add-model --vid=$vid --pid=$pid --deviceTypeID=1 --productName=TestProduct --productLabel="$productLabel" --partNumber=1 --commissioningCustomFlow=0 --commissionerRemoteUiFlowURL="$commissionerRemoteUiFlowURL" --schemaVersion=$schema_version_2 --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -71,6 +72,7 @@ test_divider productLabel="Device #1" echo "Add Model with VID: $vid_with_pids PID: $pid" result=$(echo "test1234" | dcld tx model add-model --vid=$vid_with_pids --pid=$pid --deviceTypeID=1 --productName=TestProduct --productLabel="$productLabel" --partNumber=1 --commissioningCustomFlow=0 --from=$vendor_account_with_pids --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -99,6 +101,7 @@ sv=1 cd_version_num=10 echo "Create Model Versions with VID: $vid PID: $pid SoftwareVersion: $sv" result=$(echo "test1234" | dcld tx model add-model-version --vid=$vid --pid=$pid --softwareVersion=$sv --minApplicableSoftwareVersion=1 --maxApplicableSoftwareVersion=15 --softwareVersionString=$sv --cdVersionNumber=$cd_version_num --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -106,6 +109,7 @@ test_divider echo "Create Model Versions with VID: 
$vid_with_pids PID: $pid SoftwareVersion: $sv" result=$(echo "test1234" | dcld tx model add-model-version --vid=$vid_with_pids --pid=$pid --softwareVersion=$sv --minApplicableSoftwareVersion=1 --maxApplicableSoftwareVersion=15 --softwareVersionString=$sv --cdVersionNumber=$cd_version_num --from=$vendor_account_with_pids --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -132,6 +136,7 @@ schema_version_3=3 newCommissionerRemoteUiFlowURL="https://commissionerRemoteUiFlowURL.dclmodel.updated" newCommissioningModeInitialStepsHint=8 result=$(echo "test1234" | dcld tx model update-model --vid=$vid --pid=$pid --from $vendor_account --yes --productLabel "$description" --schemaVersion=$schema_version_3 --commissionerRemoteUiFlowURL="$newCommissionerRemoteUiFlowURL" --commissioningModeInitialStepsHint="$newCommissioningModeInitialStepsHint") +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -139,6 +144,7 @@ test_divider echo "Update Model with VID: ${vid_with_pids} PID: ${pid} with new description" result=$(echo "test1234" | dcld tx model update-model --vid=$vid_with_pids --pid=$pid --from $vendor_account_with_pids --yes --productLabel "$description") +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -159,6 +165,7 @@ test_divider echo "Update Model with VID: ${vid} PID: ${pid} modifying supportURL" supportURL="https://newsupporturl.test" result=$(echo "test1234" | dcld tx model update-model --vid=$vid --pid=$pid --from $vendor_account --yes --supportURL "$supportURL") +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -176,12 +183,14 @@ test_divider echo "Delete Model with VID: ${vid} PID: ${pid}" result=$(dcld tx model delete-model --vid=$vid --pid=$pid --from=$vendor_account --yes) +result=$(get_txn_result "$result") echo "$result" test_divider echo "Delete Model with VID: ${vid_with_pids} PID: 
${pid}" result=$(dcld tx model delete-model --vid=$vid_with_pids --pid=$pid --from=$vendor_account_with_pids --yes) +result=$(get_txn_result "$result") echo "$result" test_divider diff --git a/integration_tests/cli/model-negative-cases.sh b/integration_tests/cli/model-negative-cases.sh index 3e84c4af9..3d1e64b18 100755 --- a/integration_tests/cli/model-negative-cases.sh +++ b/integration_tests/cli/model-negative-cases.sh @@ -45,11 +45,13 @@ create_new_vendor_account $vendor_account_with_pids $vid_with_pids $pid_ranges echo "Add Model with VID: $vid PID: $pid: Not Vendor" result=$(echo "test1234" | dcld tx model add-model --vid=$vid --pid=$pid --deviceTypeID=1 --productName=TestProduct --productLabel=TestingProductLabel --partNumber=1 --commissioningCustomFlow=0 --from=$certification_house --yes) +result=$(get_txn_result "$result") check_response_and_report "$result" "\"code\": 4" echo "$result" echo "Add Model with VID: $vid_with_pids PID: 101 :Vendor with non-associated PID" result=$(echo "test1234" | dcld tx model add-model --vid=$vid_with_pids --pid=101 --deviceTypeID=1 --productName=TestProduct --productLabel=TestingProductLabel --partNumber=1 --commissioningCustomFlow=0 --from=$vendor_account_with_pids --yes) +result=$(get_txn_result "$result") check_response_and_report "$result" "\"code\": 4" echo "$result" @@ -58,6 +60,7 @@ test_divider vid1=$RANDOM echo "Add Model with VID: $vid1 PID: $pid: Vendor ID does not belong to vendor" result=$(echo "test1234" | dcld tx model add-model --vid=$vid1 --pid=$pid --deviceTypeID=1 --productName=TestProduct --productLabel=TestingProductLabel --partNumber=1 --commissioningCustomFlow=0 --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response_and_report "$result" "\"code\": 4" echo "$result" @@ -65,7 +68,9 @@ test_divider echo "Add Model with VID: $vid PID: $pid: Twice" result=$(echo "test1234" | dcld tx model add-model --vid=$vid --pid=$pid --deviceTypeID=1 --productName=TestProduct 
--productLabel=TestingProductLabel --partNumber=1 --commissioningCustomFlow=0 --from=$vendor_account --yes) +result=$(get_txn_result "$result") result=$(echo "test1234" | dcld tx model add-model --vid=$vid --pid=$pid --deviceTypeID=1 --productName=TestProduct --productLabel=TestingProductLabel --partNumber=1 --commissioningCustomFlow=0 --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response_and_report "$result" "\"code\": 501" echo "$result" @@ -75,6 +80,7 @@ sv=$RANDOM svs=$RANDOM echo "Create a Device Model Version with VID: $vid PID: $pid SV: $sv" result=$(echo 'test1234' | dcld tx model add-model-version --cdVersionNumber=1 --maxApplicableSoftwareVersion=10 --minApplicableSoftwareVersion=1 --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$softwareVersionString --from=$vendor_account --yes) +result=$(get_txn_result "$result") echo "$result" check_response "$result" "\"code\": 0" @@ -83,10 +89,12 @@ zigbee_certification_type="zigbee" matter_certification_type="matter" cd_certificate_id="123" result=$(echo 'test1234' | dcld tx compliance certify-model --vid=$vid --pid=$pid --softwareVersion=$sv --cdVersionNumber=1 --certificationType="$zigbee_certification_type" --certificationDate="$certification_date" --softwareVersionString=$softwareVersionString --cdCertificateId="$cd_certificate_id" --from $zb_account --yes) +result=$(get_txn_result "$result") echo "$result" echo "Delete Model with VID: ${vid} PID: ${pid}" -result=$(dcld tx model delete-model --vid=$vid --pid=$pid --from=$vendor_account --yes) +result=$(echo 'test1234' | dcld tx model delete-model --vid=$vid --pid=$pid --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 525" # code for model certified error @@ -94,6 +102,7 @@ check_response "$result" "\"code\": 525" # code for model certified error echo "Add Model with VID: $vid PID: $pid: Unknown account" result=$(echo "test1234" | dcld tx model add-model --vid=$vid 
--pid=$pid --deviceTypeID=1 --productName=TestProduct --productLabel=TestingProductLabel --partNumber=1 --commissioningCustomFlow=0 --from=$vendor_account --yes) +result=$(get_txn_result "$result") result=$(dcld tx model add-model --vid=$vid --pid=$pid --deviceTypeID=1 --productName=TestProduct --productLabel=TestingProductLabel --partNumber=1 --commissioningCustomFlow=0 --from "Unknown" 2>&1) || true check_response_and_report "$result" "key not found" raw @@ -107,7 +116,7 @@ check_response_and_report "$result" "Vid must not be less than 1" raw result=$(echo "test1234" | dcld tx model add-model --vid=$vid --pid=$i --deviceTypeID=1 --productName=TestProduct --productLabel=TestingProductLabel --partNumber=1 --commissioningCustomFlow=0 --from $vendor_account --yes 2>&1) || true check_response_and_report "$result" "Pid must not be less than 1" raw -i="0" +i="0" result=$(echo "test1234" | dcld tx model add-model --vid=$i --pid=$pid --deviceTypeID=1 --productName=TestProduct --productLabel=TestingProductLabel --partNumber=1 --commissioningCustomFlow=0 --from $vendor_account --yes 2>&1) || true check_response_and_report "$result" "Vid must not be less than 1" raw @@ -121,7 +130,7 @@ check_response_and_report "$result" "Vid must not be greater than 65535" raw result=$(echo "test1234" | dcld tx model add-model --vid=$vid --pid=$i --deviceTypeID=1 --productName=TestProduct --productLabel=TestingProductLabel --partNumber=1 --commissioningCustomFlow=0 --from $vendor_account --yes 2>&1) || true check_response_and_report "$result" "Pid must not be greater than 65535" raw -i="string" +i="string" result=$(echo "test1234" | dcld tx model add-model --vid=$i --pid=$pid --deviceTypeID=1 --productName=TestProduct --productLabel=TestingProductLabel --partNumber=1 --commissioningCustomFlow=0 --from $vendor_account --yes 2>&1) || true check_response_and_report "$result" "invalid syntax" raw diff --git a/integration_tests/cli/model-validation-cases.sh 
b/integration_tests/cli/model-validation-cases.sh index 967ee0459..a447b6624 100755 --- a/integration_tests/cli/model-validation-cases.sh +++ b/integration_tests/cli/model-validation-cases.sh @@ -33,6 +33,7 @@ test_divider # Create a new model with minimum fields echo "Add Model with minimum required fields with VID: $vid_1 PID: $pid_1" result=$(echo "test1234" | dcld tx model add-model --vid=$vid_1 --pid=$pid_1 --deviceTypeID=1 --productName=TestProduct --productLabel="Test Product" --partNumber=1 --from=$vendor_account_1 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -55,6 +56,7 @@ result=$(echo "test1234" | dcld tx model add-model --vid=$vid_1 --pid=$pid_2 --d --commissioningModeInitialStepsHint=1 --commissioningModeInitialStepsInstruction="Initial Instructions" \ --commissioningModeSecondaryStepsHint=2 --commissioningModeSecondaryStepsInstruction="Secondary Steps Instruction" \ --userManualURL="https://usermanual.url" --productURL="https://product.url.info" --lsfURL="https://lsf.url.info" --supportURL="https://support.url.info" --from=$vendor_account_1 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -90,6 +92,7 @@ result=$(echo "test1234" | dcld tx model add-model --vid=$vid_1 --pid=$pid_3 --d --commissioningModeInitialStepsHint=1 --commissioningModeInitialStepsInstruction="Initial Instructions" \ --commissioningModeSecondaryStepsHint=2 --commissioningModeSecondaryStepsInstruction="Secondary Steps Instruction" \ --from=$vendor_account_1 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -118,6 +121,7 @@ test_divider # Update model with mutable fields and make sure they are updated properly echo "Update model with mutable fields and make sure they are updated properly VID: $vid_1 PID: $pid_1" result=$(echo "test1234" | dcld tx model update-model --vid=$vid_1 --pid=$pid_1 --productName="Updated Product Name" 
--productLabel="Updated Test Product" --partNumber="2" --lsfURL="https://lsf.url.info?v=1" --lsfRevision=1 --from=$vendor_account_1 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -138,6 +142,7 @@ test_divider # Update model with just one mutable fields and make sure they are updated properly echo "Update model with just one mutable field and make sure they are updated properly VID: $vid_1 PID: $pid_1" result=$(echo "test1234" | dcld tx model update-model --vid=$vid_1 --pid=$pid_1 --productLabel="Updated Test Product V2" --from=$vendor_account_1 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -159,6 +164,7 @@ result=$(echo "test1234" | dcld tx model update-model --vid=$vid_1 --pid=$pid_1 --productLabel="Updated Test Product V3" --commissioningModeInitialStepsInstruction="Instructions updated v3" \ --commissioningModeSecondaryStepsInstruction="Secondary Instructions v3" --userManualURL="https://userManual.info/v3" \ --supportURL="https://support.url.info/v3" --productURL="https://product.landingpage.url" --lsfURL="https://lsf.url.info?v=2" --lsfRevision=2 --from=$vendor_account_1 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -185,6 +191,7 @@ test_divider # Update model with just one mutable fields and make sure they are updated properly echo "Update model with just one mutable field and make sure all other mutated fields are still the same for VID: $vid_1 PID: $pid_1" result=$(echo "test1234" | dcld tx model update-model --vid=$vid_1 --pid=$pid_1 --productLabel="Updated Test Product V4" --from=$vendor_account_1 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -210,6 +217,7 @@ test_divider # Update model with just one mutable fields and make sure they are updated properly echo "Update model with no fields are still the same for VID: $vid_1 PID: $pid_1" result=$(echo 
"test1234" | dcld tx model update-model --vid=$vid_1 --pid=$pid_1 --from=$vendor_account_1 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -237,12 +245,14 @@ test_divider # Update the model with lsfRevision equal to the existing lsfRevision echo "Update the model with lsfRevision equal to the existing lsfRevision make sure we get error back VID: $vid_1 PID: $pid_1" result=$(echo "test1234" | dcld tx model update-model --vid=$vid_1 --pid=$pid_1 --lsfURL="https://lsf.url.info?v=3" --lsfRevision=2 --from=$vendor_account_1 --yes 2>&1) || true +result=$(get_txn_result "$result") check_response_and_report "$result" "LsfRevision should monotonically increase by 1" raw test_divider echo "Update the model with lsfRevision less then the existing lsfRevision make sure we get error back VID: $vid_1 PID: $pid_1" result=$(echo "test1234" | dcld tx model update-model --vid=$vid_1 --pid=$pid_1 --lsfURL="https://lsf.url.info?v=3" --lsfRevision=1 --from=$vendor_account_1 --yes 2>&1) || true +result=$(get_txn_result "$result") check_response_and_report "$result" "LsfRevision should monotonically increase by 1" raw test_divider @@ -255,6 +265,7 @@ sv_1=$RANDOM # Create a new model version echo "Create a Device Model Version with minimum mandatory fields for VID: $vid_1 PID: $pid_1 SV: $sv_1" result=$(echo 'test1234' | dcld tx model add-model-version --cdVersionNumber=1 --maxApplicableSoftwareVersion=20 --minApplicableSoftwareVersion=10 --vid=$vid_1 --pid=$pid_1 --softwareVersion=$sv_1 --softwareVersionString=1 --from=$vendor_account_1 --yes) +result=$(get_txn_result "$result") echo "$result" check_response_and_report "$result" "\"code\": 0" @@ -278,6 +289,7 @@ test_divider # Update the model version with only one mutable field and make sure all other fields are still the same echo "Update Device Model Version with only one mutable field and make sure all other fields are still the same for VID: $vid_1 PID: $pid_1 SV: $sv_1" 
result=$(echo "test1234" | dcld tx model update-model-version --vid=$vid_1 --pid=$pid_1 --softwareVersion=$sv_1 --softwareVersionValid=false --from=$vendor_account_1 --yes) +result=$(get_txn_result "$result") check_response_and_report "$result" "\"code\": 0" test_divider @@ -301,6 +313,7 @@ test_divider echo "Update Device Model Version with few mutable fields and make sure all other fields are still the same for VID: $vid_1 PID: $pid_1 SV: $sv_1" result=$(echo "test1234" | dcld tx model update-model-version --vid=$vid_1 --pid=$pid_1 --softwareVersion=$sv_1 --softwareVersionValid=true \ --releaseNotesURL="https://release.url.info" --otaURL="https://ota.url.com" --otaFileSize=123 --otaChecksum="123123123" --minApplicableSoftwareVersion=2 --maxApplicableSoftwareVersion=20 --from=$vendor_account_1 --yes) +result=$(get_txn_result "$result") check_response_and_report "$result" "\"code\": 0" test_divider @@ -332,6 +345,7 @@ result=$(echo 'test1234' | dcld tx model add-model-version --vid=$vid_1 --pid=$p --softwareVersionValid=true --otaURL="https://ota.url.info" --otaFileSize=123456789 \ --otaChecksum="123456789012345678901234567890123456789012345678901234567890123" --releaseNotesURL="https://release.notes.url.info" \ --otaChecksumType=1 --maxApplicableSoftwareVersion=32 --minApplicableSoftwareVersion=5 --from=$vendor_account_1 --yes) +result=$(get_txn_result "$result") echo "$result" check_response_and_report "$result" "\"code\": 0" @@ -361,6 +375,7 @@ test_divider # Update the model version with minimum fields i.e. 
no update at all echo "Update Device Model Version with only one mutable field and make sure all other fields are still the same for VID: $vid_1 PID: $pid_1 SV: $sv_1" result=$(echo "test1234" | dcld tx model update-model-version --vid=$vid_1 --pid=$pid_1 --softwareVersion=$sv_1 --from=$vendor_account_1 --yes) +result=$(get_txn_result "$result") check_response_and_report "$result" "\"code\": 0" test_divider @@ -389,6 +404,7 @@ test_divider # Update the model version with only one mutable field and make sure all other fields are still the same echo "Update Device Model Version with only one mutable field and make sure all other fields are still the same for VID: $vid_1 PID: $pid_1 SV: $sv_1" result=$(echo "test1234" | dcld tx model update-model-version --vid=$vid_1 --pid=$pid_1 --softwareVersion=$sv_1 --softwareVersionValid=false --from=$vendor_account_1 --yes) +result=$(get_txn_result "$result") check_response_and_report "$result" "\"code\": 0" test_divider @@ -418,6 +434,7 @@ echo "Update Device Model Version with all mutable fields for VID: $vid_1 PID: $ result=$(echo 'test1234' | dcld tx model update-model-version --vid=$vid_1 --pid=$pid_1 --softwareVersion=$sv_1 \ --softwareVersionValid=true --otaURL="https://updated.ota.url.info" --releaseNotesURL="https://updated.release.notes.url.info" \ --maxApplicableSoftwareVersion=25 --minApplicableSoftwareVersion=15 --from=$vendor_account_1 --yes) +result=$(get_txn_result "$result") echo "$result" check_response_and_report "$result" "\"code\": 0" @@ -450,6 +467,7 @@ result=$(echo 'test1234' | dcld tx model add-model-version --vid=$vid_1 --pid=$p --softwareVersionValid=true --otaURL="https://ota.url.info" --otaFileSize=123456789 \ --otaChecksum="123456789012345678901234567890123456789012345678901234567890123" \ --otaChecksumType=1 --maxApplicableSoftwareVersion=32 --minApplicableSoftwareVersion=5 --from=$vendor_account_1 --yes) +result=$(get_txn_result "$result") echo "$result" check_response_and_report "$result" 
"\"code\": 0" diff --git a/integration_tests/cli/modelversion-demo-hex.sh b/integration_tests/cli/modelversion-demo-hex.sh index a878811e5..6a2fbb68d 100644 --- a/integration_tests/cli/modelversion-demo-hex.sh +++ b/integration_tests/cli/modelversion-demo-hex.sh @@ -32,6 +32,7 @@ create_new_vendor_account $vendor_account $vid_in_hex_format echo "Add Model with VID: $vid_in_hex_format PID: $pid_in_hex_format" result=$(echo "test1234" | dcld tx model add-model --vid=$vid_in_hex_format --pid=$pid_in_hex_format --deviceTypeID=1 --productName=TestProduct --productLabel="Test Product" --partNumber=1 --commissioningCustomFlow=0 --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -39,6 +40,7 @@ test_divider sv=$RANDOM echo "Create a Device Model Version with VID: $vid PID: $pid SV: $sv" result=$(echo 'test1234' | dcld tx model add-model-version --cdVersionNumber=1 --maxApplicableSoftwareVersion=10 --minApplicableSoftwareVersion=1 --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=1 --from=$vendor_account --yes) +result=$(get_txn_result "$result") echo "$result" check_response "$result" "\"code\": 0" @@ -91,6 +93,7 @@ test_divider # Update the existing model version echo "Update Device Model Version with VID: $vid_in_hex_format PID: $pid_in_hex_format SV: $sv" result=$(echo 'test1234' | dcld tx model update-model-version --vid=$vid_in_hex_format --pid=$pid_in_hex_format --minApplicableSoftwareVersion=2 --maxApplicableSoftwareVersion=10 --softwareVersion=$sv --softwareVersionValid=false --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -114,6 +117,7 @@ test_divider sv2=$RANDOM echo "Create a Second Device Model Version with VID: $vid_in_hex_format PID: $pid_in_hex_format SV: $sv2" result=$(echo 'test1234' | dcld tx model add-model-version --cdVersionNumber=1 --maxApplicableSoftwareVersion=10 
--minApplicableSoftwareVersion=1 --vid=$vid --pid=$pid --softwareVersion=$sv2 --softwareVersionString=1 --from=$vendor_account --yes) +result=$(get_txn_result "$result") echo "$result" check_response "$result" "\"code\": 0" @@ -138,6 +142,7 @@ new_vid_in_hex_format="0xB17" different_vendor_account=vendor_account_$new_vid_in_hex_format create_new_vendor_account $different_vendor_account $new_vid_in_hex_format result=$(echo 'test1234' | dcld tx model add-model-version --cdVersionNumber=1 --maxApplicableSoftwareVersion=10 --minApplicableSoftwareVersion=1 --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=1 --from=$different_vendor_account --yes) +result=$(get_txn_result "$result") check_response_and_report "$result" "transaction should be signed by a vendor account containing the vendorID $vid" test_divider @@ -145,5 +150,6 @@ test_divider # Update model version with vid belonging to another vendor echo "Update a Device Model Version with VID: $vid_in_hex_format PID: $pid_in_hex_format SV: $sv from a different vendor account" result=$(echo 'test1234' | dcld tx model update-model-version --vid=$vid_in_hex_format --pid=$pid_in_hex_format --softwareVersion=$sv --softwareVersionValid=false --from=$different_vendor_account --yes) +result=$(get_txn_result "$result") check_response_and_report "$result" "transaction should be signed by a vendor account containing the vendorID $vid" diff --git a/integration_tests/cli/modelversion-demo.sh b/integration_tests/cli/modelversion-demo.sh index 9f6832598..faa9d71a6 100755 --- a/integration_tests/cli/modelversion-demo.sh +++ b/integration_tests/cli/modelversion-demo.sh @@ -29,6 +29,7 @@ create_new_vendor_account $vendor_account $vid # Create a new model version echo "Add Model with VID: $vid PID: $pid" result=$(echo "test1234" | dcld tx model add-model --vid=$vid --pid=$pid --deviceTypeID=1 --productName=TestProduct --productLabel="Test Product" --partNumber=1 --commissioningCustomFlow=0 --from=$vendor_account --yes) 
+result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -37,6 +38,7 @@ sv=$RANDOM schema_version_2=2 echo "Create a Device Model Version with VID: $vid PID: $pid SV: $sv" result=$(echo 'test1234' | dcld tx model add-model-version --cdVersionNumber=1 --maxApplicableSoftwareVersion=10 --minApplicableSoftwareVersion=1 --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=1 --schemaVersion=$schema_version_2 --from=$vendor_account --yes) +result=$(get_txn_result "$result") echo "$result" check_response "$result" "\"code\": 0" @@ -91,6 +93,7 @@ schema_version_3=3 # Update the existing model version echo "Update Device Model Version with VID: $vid PID: $pid SV: $sv" result=$(echo 'test1234' | dcld tx model update-model-version --vid=$vid --pid=$pid --minApplicableSoftwareVersion=2 --maxApplicableSoftwareVersion=10 --softwareVersion=$sv --softwareVersionValid=false --schemaVersion=$schema_version_3 --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -115,6 +118,7 @@ test_divider sv2=$RANDOM echo "Create a Second Device Model Version with VID: $vid PID: $pid SV: $sv2" result=$(echo 'test1234' | dcld tx model add-model-version --cdVersionNumber=1 --maxApplicableSoftwareVersion=10 --minApplicableSoftwareVersion=1 --vid=$vid --pid=$pid --softwareVersion=$sv2 --softwareVersionString=1 --from=$vendor_account --yes) +result=$(get_txn_result "$result") echo "$result" check_response "$result" "\"code\": 0" @@ -139,6 +143,7 @@ newvid=$RANDOM different_vendor_account=vendor_account_$newvid create_new_vendor_account $different_vendor_account $newvid result=$(echo 'test1234' | dcld tx model add-model-version --cdVersionNumber=1 --maxApplicableSoftwareVersion=10 --minApplicableSoftwareVersion=1 --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=1 --from=$different_vendor_account --yes) +result=$(get_txn_result "$result") check_response_and_report 
"$result" "transaction should be signed by a vendor account containing the vendorID $vid" test_divider @@ -146,11 +151,13 @@ test_divider # Update model version with vid belonging to another vendor echo "Update a Device Model Version with VID: $vid PID: $pid SV: $sv from a different vendor account" result=$(echo 'test1234' | dcld tx model update-model-version --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionValid=false --from=$different_vendor_account --yes) +result=$(get_txn_result "$result") check_response_and_report "$result" "transaction should be signed by a vendor account containing the vendorID $vid" # Delete existing model version echo "Delete a Device Model Version with VID: $vid PID: $pid SV: $sv" result=$(echo 'test1234' | dcld tx model delete-model-version --vid=$vid --pid=$pid --softwareVersion=$sv --from=$vendor_account --yes) +result=$(get_txn_result "$result") echo "$result" check_response "$result" "\"code\": 0" diff --git a/integration_tests/cli/pki-add-vendor-x509-certificates.sh b/integration_tests/cli/pki-add-vendor-x509-certificates.sh index 1903a7cb2..17e609280 100755 --- a/integration_tests/cli/pki-add-vendor-x509-certificates.sh +++ b/integration_tests/cli/pki-add-vendor-x509-certificates.sh @@ -30,16 +30,20 @@ create_new_vendor_account $vendor_account_65521 $vendor_vid_65521 echo "Propose and approve root certificate with vid=$root_cert_with_vid_65521_vid" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$root_cert_with_vid_65521_path" --vid "$root_cert_with_vid_65521_vid" --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" result=$(echo "$passphrase" | dcld tx pki approve-add-x509-root-cert --subject="$root_cert_with_vid_65521_subject" --subject-key-id="$root_cert_with_vid_65521_subject_key_id" --from $second_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Try to add the intermediate 
certificate using an account that does not have vendor role" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$intermediate_cert_with_vid_65521_path" --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 4" echo "Add an intermediate certificate with vid=$intermediate_cert_with_vid_65521_vid by $vendor_account_65521 with vid=$vendor_vid_65521" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$intermediate_cert_with_vid_65521_path" --from $vendor_account_65521 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all approved root certificates." @@ -53,6 +57,7 @@ check_response "$result" "\"serialNumber\": \"$intermediate_cert_with_vid_65521_ echo "Try to add an intermediate certificate with vid=$intermediate_cert_with_vid_65522_vid by $vendor_account_65521 with vid=$vendor_vid_65521" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$intermediate_cert_with_vid_65522_path" --from $vendor_account_65521 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 440" echo "Request all approved root certificates should not contain intermediate cert with serialNumber=$intermediate_cert_with_vid_65522_serial_number" @@ -77,8 +82,10 @@ intermediate_cert_with_vid_65522_serial_number="4428370313154203676" echo "Propose and approve non-vid root certificate" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$root_cert_with_no_vid_path" --vid "65522" --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" result=$(echo "$passphrase" | dcld tx pki approve-add-x509-root-cert --subject="$root_cert_with_no_vid_subject" --subject-key-id="$root_cert_with_no_vid_subject_key_id" --from $second_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" vendor_vid_65523=65523 @@ -88,6 +95,7 @@ 
create_new_vendor_account $vendor_account_65523 $vendor_vid_65523 echo "Try to add an intermediate certificate with vid=$intermediate_cert_with_vid_65522_vid by $vendor_account_65523 with vid=$vendor_vid_65523" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$intermediate_cert_with_vid_65522_path" --from $vendor_account_65523 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 439" echo "Request all approved root certificates should not contain intermediate cert with serialNumber=$intermediate_cert_with_vid_65522_serial_number" @@ -106,6 +114,7 @@ create_new_vendor_account $vendor_account_65522 $vendor_vid_65522 echo "Add an intermediate certificate with vid=$intermediate_cert_with_vid_65522_vid by $vendor_account_65522 with vid=$vendor_vid_65522" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$intermediate_cert_with_vid_65522_path" --from $vendor_account_65522 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all approved root certificates should contain intermediate cert with serialNumber=$intermediate_cert_with_vid_65522_serial_number" @@ -133,12 +142,15 @@ intermediate_cert_2_serial_number="4" echo "Propose and approve root certificate" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$root_cert_path" --vid "$vendor_vid_65521" --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" result=$(echo "$passphrase" | dcld tx pki approve-add-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --from $second_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add first intermediate certificate by $vendor_account_65521" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$intermediate_cert_1_path" --from $vendor_account_65521 --yes) +result=$(get_txn_result 
"$result") check_response "$result" "\"code\": 0" echo "Request all approved root certificates." @@ -152,6 +164,7 @@ check_response "$result" "\"serialNumber\": \"$intermediate_cert_1_serial_number echo "Try to add second intermediate certificate with same subject and SKID by $vendor_account_65523" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$intermediate_cert_2_path" --from $vendor_account_65523 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 4" echo "Request all approved root certificates should not contain intermediate cert with serialNumber=$intermediate_cert_2_serial_number" @@ -170,6 +183,7 @@ create_new_vendor_account $second_vendor_account_65521 $vendor_vid_65521 echo "Add second intermediate certificate with same subject and SKID by $second_vendor_account_65521" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$intermediate_cert_2_path" --from $vendor_account_65521 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all approved root certificates should contain intermediate cert with serialNumber=$intermediate_cert_2_serial_number" diff --git a/integration_tests/cli/pki-approval.sh b/integration_tests/cli/pki-approval.sh index 0b8d441a7..3f0c9b48d 100755 --- a/integration_tests/cli/pki-approval.sh +++ b/integration_tests/cli/pki-approval.sh @@ -58,10 +58,12 @@ fourth_trustee_pubkey=$(echo $passphrase | dcld keys show $fourth_trustee_accoun echo "$first_trustee_account proposes account for $fourth_trustee_account" result=$(echo $passphrase | dcld tx auth propose-add-account --info="Jack is proposing this account" --address="$fourth_trustee_address" --pubkey="$fourth_trustee_pubkey" --roles="Trustee" --from $first_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$second_trustee_account approves account for $fourth_trustee_account" result=$(echo $passphrase | dcld tx auth 
approve-add-account --address="$fourth_trustee_address" --from $second_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Verify that the account is now present" @@ -80,14 +82,17 @@ fifth_trustee_pubkey=$(echo $passphrase | dcld keys show $fifth_trustee_account echo "$first_trustee_account proposes account for $fifth_trustee_account" result=$(echo $passphrase | dcld tx auth propose-add-account --info="Jack is proposing this account" --address="$fifth_trustee_address" --pubkey="$fifth_trustee_pubkey" --roles="Trustee" --from $first_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$second_trustee_account approves account for $fifth_trustee_account" result=$(echo $passphrase | dcld tx auth approve-add-account --address="$fifth_trustee_address" --from $second_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$third_trustee_account approves account for $fifth_trustee_account" result=$(echo $passphrase | dcld tx auth approve-add-account --address="$fifth_trustee_address" --from $third_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Verify that fifth account is now present" @@ -105,18 +110,22 @@ sixth_trustee_pubkey=$(echo $passphrase | dcld keys show $sixth_trustee_account echo "$first_trustee_account proposes account for $sixth_trustee_account" result=$(echo $passphrase | dcld tx auth propose-add-account --info="Jack is proposing this account" --address="$sixth_trustee_address" --pubkey="$sixth_trustee_pubkey" --roles="Trustee" --from $first_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$second_trustee_account approves account for $sixth_trustee_account" result=$(echo $passphrase | dcld tx auth approve-add-account --address="$sixth_trustee_address" --from $second_trustee_account --yes) 
+result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$third_trustee_account approves account for $sixth_trustee_account" result=$(echo $passphrase | dcld tx auth approve-add-account --address="$sixth_trustee_address" --from $third_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$fourth_trustee_account approves account for $sixth_trustee_account" result=$(echo $passphrase | dcld tx auth approve-add-account --address="$sixth_trustee_address" --from $fourth_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Verify that sixth account is now present" @@ -129,10 +138,12 @@ echo "PROPOSE ROOT CERT" echo "$user_account (Not Trustee) propose Root certificate" root_path="integration_tests/constants/root_cert" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$root_path" --vid $vid --from $user_account --yes) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" echo "$fourth_trustee_account (Trustee) propose Root certificate" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$root_path" --vid $vid --from $fourth_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -141,6 +152,7 @@ echo "Approve Root certificate now 4 Approvals are needed as we have 6 trustees" echo "$first_trustee_account (Trustee) approve Root certificate" result=$(echo "$passphrase" | dcld tx pki approve-add-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --from $first_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -153,6 +165,7 @@ test_divider echo "$second_trustee_account (Trustee) approve Root certificate" result=$(echo "$passphrase" | dcld tx pki approve-add-x509-root-cert 
--subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --from $second_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -165,6 +178,7 @@ test_divider echo "$third_trustee_account (Trustee) approve Root certificate" result=$(echo "$passphrase" | dcld tx pki approve-add-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --from $third_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -181,6 +195,7 @@ test_divider echo "$sixth_trustee_account proposes revoke Root certificate" result=$(echo "$passphrase" | dcld tx pki propose-revoke-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --from $sixth_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request root certificate proposed to revoke and verify that it contains approval from $sixth_trustee_account" @@ -194,6 +209,7 @@ test_divider echo "$fifth_trustee_account revokes Root certificate" result=$(echo "$passphrase" | dcld tx pki approve-revoke-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --from $fifth_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request root certificate proposed to revoke and verify that it contains approval from $fifth_trustee_account" @@ -206,6 +222,7 @@ check_response "$result" "\"address\": \"$fifth_trustee_address\"" echo "$fourth_trustee_account revokes Root certificate" result=$(echo "$passphrase" | dcld tx pki approve-revoke-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --from $fourth_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request root certificate proposed to revoke and verify that it contains approval 
from $fourth_trustee_account" @@ -219,6 +236,7 @@ check_response "$result" "\"address\": \"$fourth_trustee_address\"" echo "$third_trustee_account revokes Root certificate" result=$(echo "$passphrase" | dcld tx pki approve-revoke-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --from $third_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Verify Root certificate is now revoked" diff --git a/integration_tests/cli/pki-assign-vid.sh b/integration_tests/cli/pki-assign-vid.sh index 552968031..e765c95c6 100755 --- a/integration_tests/cli/pki-assign-vid.sh +++ b/integration_tests/cli/pki-assign-vid.sh @@ -19,12 +19,15 @@ echo "ASSIGN VID TO ROOT CERTIFICATE THAT ALREADY HAS VID" echo "Propose and approve root certificate" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$root_cert_subject_path" --vid "$root_cert_vid" --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" result=$(echo "$passphrase" | dcld tx pki approve-add-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --from $second_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Assing VID" result=$(dcld tx pki assign-vid --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --vid="$root_cert_vid" --from $vendor_admin_account --yes) +result=$(get_txn_result "$result") check_response "$result" "vid is not empty" test_divider diff --git a/integration_tests/cli/pki-demo.sh b/integration_tests/cli/pki-demo.sh index e30ab710b..f526681bd 100755 --- a/integration_tests/cli/pki-demo.sh +++ b/integration_tests/cli/pki-demo.sh @@ -190,10 +190,12 @@ root_path="integration_tests/constants/root_cert" cert_schema_version_1=1 schema_version_2=2 result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert 
--certificate="$root_path" --from $user_account --vid $vid --yes) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" echo "$trustee_account (Trustee) propose Root certificate" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$root_path" --certificate-schema-version=$cert_schema_version_1 --schemaVersion=$schema_version_2 --from $trustee_account --vid $vid --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -315,6 +317,7 @@ test_divider echo "$second_trustee_account (Second Trustee) approves Root certificate" result=$(echo "$passphrase" | dcld tx pki approve-add-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --from $second_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" @@ -388,6 +391,7 @@ test_divider echo "$vendor_account adds Intermediate certificate" intermediate_path="integration_tests/constants/intermediate_cert" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$intermediate_path" --certificate-schema-version=$cert_schema_version_1 --schemaVersion=$schema_version_2 --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" @@ -459,6 +463,7 @@ echo "$vendor_account add Leaf certificate" leaf_path="integration_tests/constants/leaf_cert" schema_version_0=0 result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$leaf_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -667,15 +672,18 @@ test_divider echo "Try to revoke the intermediate certificate when sender is not Vendor account" result=$(echo "$passphrase" | dcld tx pki revoke-x509-cert --subject="$intermediate_cert_subject" --subject-key-id="$intermediate_cert_subject_key_id" --from=$user_account --yes) +result=$(get_txn_result "$result") 
check_response "$result" "\"code\": 4" echo "Try to revoke the intermediate certificate using a vendor account with other VID" result=$(echo "$passphrase" | dcld tx pki revoke-x509-cert --subject="$intermediate_cert_subject" --subject-key-id="$intermediate_cert_subject_key_id" --from=$vendor_account_65522 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 4" revoke_schema_version_3=3 echo "$vendor_account (Not Trustee) revokes only Intermediate certificate. This must not revoke its child - Leaf certificate." result=$(echo "$passphrase" | dcld tx pki revoke-x509-cert --subject="$intermediate_cert_subject" --subject-key-id="$intermediate_cert_subject_key_id" --schemaVersion=$revoke_schema_version_3 --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -817,6 +825,7 @@ test_divider revoke_schema_version_4=4 echo "$trustee_account (Trustee) proposes to revoke only Root certificate(child certificates should not be revoked)" result=$(echo "$passphrase" | dcld tx pki propose-revoke-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --schemaVersion=$revoke_schema_version_4 --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -924,6 +933,7 @@ test_divider echo "$second_trustee_account (Second Trustee) approves to revoke Root certificate" result=$(echo "$passphrase" | dcld tx pki approve-revoke-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --from $second_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -1174,11 +1184,13 @@ cert_schema_version_0=0 echo "$user_account (Not Trustee) propose Root certificate" google_root_path="integration_tests/constants/google_root_cert" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert 
--certificate="$google_root_path" --from $user_account --vid=$google_cert_vid --yes) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" echo "$trustee_account (Trustee) propose Root certificate" google_root_path="integration_tests/constants/google_root_cert" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$google_root_path" --from $trustee_account --vid=$google_cert_vid --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -1287,6 +1299,7 @@ test_divider echo "$second_trustee_account (Second Trustee) approves Root certificate" result=$(echo "$passphrase" | dcld tx pki approve-add-x509-root-cert --subject="$google_cert_subject" --subject-key-id="$google_cert_subject_key_id" --from $second_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -1354,6 +1367,7 @@ test_divider echo "$trustee_account (Trustee) proposes to revoke Root certificate" result=$(echo "$passphrase" | dcld tx pki propose-revoke-x509-root-cert --subject="$google_cert_subject" --subject-key-id="$google_cert_subject_key_id" --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -1430,6 +1444,7 @@ test_divider echo "$second_trustee_account (Second Trustee) approves to revoke Root certificate" result=$(echo "$passphrase" | dcld tx pki approve-revoke-x509-root-cert --subject="$google_cert_subject" --subject-key-id="$google_cert_subject_key_id" --from $second_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -1518,10 +1533,12 @@ test_divider echo "$trustee_account (Trustee) propose Root certificate" test_root_path="integration_tests/constants/test_root_cert" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$test_root_path" --vid=$test_cert_vid --from $trustee_account 
--yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$trustee_account (Trustee) rejects Root certificate" result=$(echo $passphrase | dcld tx pki reject-add-x509-root-cert --subject="$test_cert_subject" --subject-key-id="$test_cert_subject_key_id" --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -1558,11 +1575,13 @@ test_divider echo "$user_account (Not Trustee) propose Root certificate" test_root_path="integration_tests/constants/test_root_cert" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$test_root_path" --vid=$test_cert_vid --from $user_account --yes) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" echo "$trustee_account (Trustee) propose Root certificate" test_root_path="integration_tests/constants/test_root_cert" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$test_root_path" --vid=$test_cert_vid --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -1602,40 +1621,47 @@ test_divider echo "Jack proposes account for $new_trustee1" result=$(echo $passphrase | dcld tx auth propose-add-account --info="Jack is proposing this account" --address="$new_trustee_address1" --pubkey="$new_trustee_pubkey1" --roles="Trustee" --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Alice approves account for \"$new_trustee1\"" result=$(echo $passphrase | dcld tx auth approve-add-account --address="$new_trustee_address1" --info="Alice is approving this account" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Bob (Trustee) approves Root certificate" result=$(echo $passphrase | dcld tx pki approve-add-x509-root-cert --subject="$test_cert_subject" 
--subject-key-id="$test_cert_subject_key_id" --from bob --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "$trustee_account (Trustee) rejects Root certificate" result=$(echo $passphrase | dcld tx pki reject-add-x509-root-cert --subject="$test_cert_subject" --subject-key-id="$test_cert_subject_key_id" --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "$trustee_account (Trustee) can approve Root certificate even if already has rejected" result=$(echo $passphrase | dcld tx pki approve-add-x509-root-cert --subject="$test_cert_subject" --subject-key-id="$test_cert_subject_key_id" --from $trustee_account --yes 2>&1 || true) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "$trustee_account (Trustee) rejects Root certificate" result=$(echo $passphrase | dcld tx pki reject-add-x509-root-cert --subject="$test_cert_subject" --subject-key-id="$test_cert_subject_key_id" --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "$trustee_account (Trustee) cannot reject Root certificate for the second time" result=$(echo $passphrase | dcld tx pki reject-add-x509-root-cert --subject="$test_cert_subject" --subject-key-id="$test_cert_subject_key_id" --from $trustee_account --yes 2>&1 || true) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" test_divider @@ -1666,20 +1692,24 @@ test_divider reject_schema_version_4=4 echo "$second_trustee_account (Second Trustee) rejects Root certificate" result=$(echo "$passphrase" | dcld tx pki reject-add-x509-root-cert --subject="$test_cert_subject" --subject-key-id="$test_cert_subject_key_id" --schemaVersion=$reject_schema_version_4 --from $second_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Alice proposes to 
revoke account for $new_trustee1" result=$(echo $passphrase | dcld tx auth propose-revoke-account --address="$new_trustee_address1" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Bob approves to revoke account for $new_trustee1" result=$(echo $passphrase | dcld tx auth approve-revoke-account --address="$new_trustee_address1" --from bob --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Jack approves to revoke account for $new_trustee1" result=$(echo $passphrase | dcld tx auth approve-revoke-account --address="$new_trustee_address1" --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" @@ -1730,11 +1760,13 @@ test_divider echo "$user_account (Not Trustee) propose Root certificate" test_root_path="integration_tests/constants/test_root_cert" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$test_root_path" --vid $test_cert_vid --from $user_account --yes) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" echo "$trustee_account (Trustee) propose Root certificate" test_root_path="integration_tests/constants/test_root_cert" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$test_root_path" --vid $test_cert_vid --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -1763,6 +1795,7 @@ test_divider echo "$second_trustee_account (Trustee) approve Root certificate" result=$(echo $passphrase | dcld tx pki approve-add-x509-root-cert --subject="$test_cert_subject" --subject-key-id="$test_cert_subject_key_id" --from $second_trustee_account_address --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider diff --git a/integration_tests/cli/pki-noc-certs.sh b/integration_tests/cli/pki-noc-certs.sh index 5a71753dc..da12f77fb 100755 --- 
a/integration_tests/cli/pki-noc-certs.sh +++ b/integration_tests/cli/pki-noc-certs.sh @@ -149,22 +149,26 @@ test_divider echo "Try to add intermediate cert using add-noc-x509-root-cert command" intermediate_path="integration_tests/constants/intermediate_cert" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-root-cert --certificate="$intermediate_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 414" cert_schema_version_1=1 schema_version_2=2 echo "Add first NOC root certificate by vendor with VID = $vid" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-root-cert --certificate="$noc_root_cert_1_path" --certificate-schema-version=$cert_schema_version_1 --schemaVersion=$schema_version_2 --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" cert_schema_version_0=0 schema_version_0=0 echo "Add second NOC root certificate by vendor with VID = $vid" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-root-cert --certificate="$noc_root_cert_2_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add third NOC root certificate by vendor with VID = $vid_2" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-root-cert --certificate="$noc_root_cert_3_path" --from $vendor_account_2 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -261,6 +265,7 @@ check_response "$result" "\"subjectAsText\": \"$noc_root_cert_1_subject_as_text\ echo "Add first intermidiate NOC certificate by vendor with VID = $vid" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-ica-cert --certificate="$noc_cert_1_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request intermidiate NOC certificate by VID = $vid" @@ -281,6 +286,7 @@ check_response "$result" "\"subjectKeyId\": 
\"$noc_cert_1_subject_key_id\"" echo "Try to add intermediate with different VID = $vid_2" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-ica-cert --certificate="$noc_cert_2_path" --from $vendor_account_2 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 439" test_divider @@ -289,10 +295,12 @@ cert_schema_version_3=3 schema_version_4=4 echo "Add second NOC certificate by vendor with VID = $vid" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-ica-cert --certificate="$noc_cert_2_path" --certificate-schema-version=$cert_schema_version_3 --schemaVersion=$schema_version_4 --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add third NOC certificate by vendor with VID = $vid" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-ica-cert --certificate="$noc_cert_1_copy_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all NOC certificates" @@ -328,10 +336,12 @@ test_divider echo "Add third NOC root certificate by vendor with VID = $vid" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-root-cert --certificate="$noc_root_cert_1_copy_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add NOC leaf certificate by vendor with VID = $vid" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-ica-cert --certificate="$noc_leaf_cert_1_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request All NOC root certificate" @@ -351,11 +361,13 @@ check_response "$result" "\"serialNumber\": \"$noc_leaf_cert_1_serial_number\"" echo "Try to revoke NOC root certificate with different VID = $vid_2" result=$(echo "$passphrase" | dcld tx pki revoke-noc-x509-root-cert --subject="$noc_root_cert_1_subject" --subject-key-id="$noc_root_cert_1_subject_key_id" --from $vendor_account_2 
--yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 439" revoke_schema_version_5=5 echo "$vendor_account Vendor revokes only root certificate, it should not revoke intermediate certificates" result=$(echo "$passphrase" | dcld tx pki revoke-noc-x509-root-cert --subject="$noc_root_cert_1_subject" --subject-key-id="$noc_root_cert_1_subject_key_id" --schemaVersion=$revoke_schema_version_5 --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all revoked certificates should contain two root certificates only" @@ -475,11 +487,13 @@ echo "REVOCATION OF NON-ROOT NOC CERTIFICATES" echo "Try to revoke NOC certificate with different VID = $vid_2" result=$(echo "$passphrase" | dcld tx pki revoke-noc-x509-ica-cert --subject="$noc_cert_1_subject" --subject-key-id="$noc_cert_1_subject_key_id" --from $vendor_account_2 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 439" revoke_schema_version_6=6 echo "$vendor_account Vendor revokes only NOC certificates, it should not revoke leaf certificates" result=$(echo "$passphrase" | dcld tx pki revoke-noc-x509-ica-cert --subject="$noc_cert_1_subject" --subject-key-id="$noc_cert_1_subject_key_id" --schemaVersion=$revoke_schema_version_6 --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all revoked certificates should not contain leaf certificate" diff --git a/integration_tests/cli/pki-noc-revocation-with-revoking-child.sh b/integration_tests/cli/pki-noc-revocation-with-revoking-child.sh index eaee4c4ef..5b2344b6b 100755 --- a/integration_tests/cli/pki-noc-revocation-with-revoking-child.sh +++ b/integration_tests/cli/pki-noc-revocation-with-revoking-child.sh @@ -50,18 +50,22 @@ test_divider echo "Add first NOC root certificate by vendor with VID = $vid" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-root-cert --certificate="$noc_root_cert_1_path" 
--from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add second NOC root certificate by vendor with VID = $vid" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-root-cert --certificate="$noc_root_cert_1_copy_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add first NOC certificate by vendor with VID = $vid" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-ica-cert --certificate="$noc_cert_1_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add NOC leaf certificate by vendor with VID = $vid" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-ica-cert --certificate="$noc_leaf_cert_1_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -80,6 +84,7 @@ check_response "$result" "\"serialNumber\": \"$noc_leaf_cert_1_serial_number\"" echo "$vendor_account Vendor revokes root NOC certificate by setting \"revoke-child\" flag to true, it should revoke child certificates too" result=$(echo "$passphrase" | dcld tx pki revoke-noc-x509-root-cert --subject="$noc_root_cert_1_subject" --subject-key-id="$noc_root_cert_1_subject_key_id" --revoke-child=true --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all revoked certificates should contain two root, one intermediate and one leaf certificates" @@ -178,18 +183,22 @@ echo "REVOCATION OF NOC NON-ROOT CERTIFICATES" echo "Add NOC root certificate by vendor with VID = $vid" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-root-cert --certificate="$noc_root_cert_2_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add intermidiate NOC certificate by vendor with VID = $vid" result=$(echo "$passphrase" | dcld tx pki 
add-noc-x509-ica-cert --certificate="$noc_cert_2_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add second intermidiate NOC certificate by vendor with VID = $vid" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-ica-cert --certificate="$noc_cert_2_copy_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add leaf certificate by vendor with VID = $vid" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-ica-cert --certificate="$noc_leaf_cert_2_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request All NOC root certificate" @@ -206,6 +215,7 @@ check_response "$result" "\"serialNumber\": \"$noc_leaf_cert_2_serial_number\"" echo "$vendor_account Vendor revokes non-root NOC certificate by setting \"revoke-child\" flag to true, it should revoke child certificates too" result=$(echo "$passphrase" | dcld tx pki revoke-noc-x509-ica-cert --subject="$noc_cert_2_subject" --subject-key-id="$noc_cert_2_subject_key_id" --revoke-child=true --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all revoked certificates should two intermediate and one leaf certificates" diff --git a/integration_tests/cli/pki-noc-revocation-with-serial-number.sh b/integration_tests/cli/pki-noc-revocation-with-serial-number.sh index 7cadcdabb..94c3c800b 100755 --- a/integration_tests/cli/pki-noc-revocation-with-serial-number.sh +++ b/integration_tests/cli/pki-noc-revocation-with-serial-number.sh @@ -50,18 +50,22 @@ test_divider echo "Add first NOC root certificate by vendor with VID = $vid" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-root-cert --certificate="$noc_root_cert_1_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add second NOC root 
certificate by vendor with VID = $vid" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-root-cert --certificate="$noc_root_cert_1_copy_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add intermidiate NOC certificate by vendor with VID = $vid" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-ica-cert --certificate="$noc_cert_1_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add NOC leaf certificate by vendor with VID = $vid" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-ica-cert --certificate="$noc_leaf_cert_1_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -88,10 +92,12 @@ check_response "$result" "\"serialNumber\": \"$noc_leaf_cert_1_serial_number\"" echo "Try to revoke intermediate with invalid serialNumber" result=$(echo "$passphrase" | dcld tx pki revoke-noc-x509-root-cert --subject="$noc_root_cert_1_subject" --subject-key-id="$noc_root_cert_1_subject_key_id" --serial-number="invalid" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 404" echo "$vendor_account Vendor revokes root NOC certificate with serialNumber=$noc_root_cert_1_serial_number only, it should not revoke child certificates" result=$(echo "$passphrase" | dcld tx pki revoke-noc-x509-root-cert --subject="$noc_root_cert_1_subject" --subject-key-id="$noc_root_cert_1_subject_key_id" --serial-number="$noc_root_cert_1_serial_number" --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all revoked certificates should contain root certificate with serialNumber=$noc_root_cert_1_serial_number" @@ -188,6 +194,7 @@ test_divider echo "$vendor_account Vendor revokes second root NOC certificate by serialNumber with \"revoke-child\" flag set to true, it should 
remove child certificates too" result=$(echo "$passphrase" | dcld tx pki revoke-noc-x509-root-cert --subject="$noc_root_cert_1_subject" --subject-key-id="$noc_root_cert_1_subject_key_id" --serial-number="$noc_root_cert_1_copy_serial_number" --revoke-child=true --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all revoked certificates should contain two root, one intermediate and one leaf certificates" @@ -286,18 +293,22 @@ echo "REVOCATION OF NOC NON-ROOT CERTIFICATES" echo "Add NOC root certificate by vendor with VID = $vid" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-root-cert --certificate="$noc_root_cert_2_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add first intermidiate NOC certificate by vendor with VID = $vid" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-ica-cert --certificate="$noc_cert_2_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add second intermidiate NOC certificate by vendor with VID = $vid" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-ica-cert --certificate="$noc_cert_2_copy_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add leaf certificate by vendor with VID = $vid" result=$(echo "$passphrase" | dcld tx pki add-noc-x509-ica-cert --certificate="$noc_leaf_cert_2_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request All NOC root certificate" @@ -314,10 +325,12 @@ check_response "$result" "\"serialNumber\": \"$noc_leaf_cert_2_serial_number\"" echo "Try to revoke intermediate with invalid serialNumber" result=$(echo "$passphrase" | dcld tx pki revoke-noc-x509-ica-cert --subject="$noc_cert_2_subject" --subject-key-id="$noc_cert_2_subject_key_id" --serial-number="invalid" 
--from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 404" echo "$vendor_account Vendor revokes NOC certificate with serialNumber=$noc_cert_2_serial_number only, it should not revoke child certificates" result=$(echo "$passphrase" | dcld tx pki revoke-noc-x509-ica-cert --subject="$noc_cert_2_subject" --subject-key-id="$noc_cert_2_subject_key_id" --serial-number="$noc_cert_2_serial_number" --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all revoked certificates should contain one intermediate certificate only" @@ -370,6 +383,7 @@ echo $result | jq echo "$vendor_account Vendor revokes NOC certificate with serialNumber=$noc_cert_2_serial_number with \"revoke-child\" flag set to true, it should revoke child certificates too" result=$(echo "$passphrase" | dcld tx pki revoke-noc-x509-ica-cert --subject="$noc_cert_2_subject" --subject-key-id="$noc_cert_2_subject_key_id" --serial-number="$noc_cert_2_copy_serial_number" --revoke-child=true --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all revoked certificates should contain two intermediate and one leaf certificates" diff --git a/integration_tests/cli/pki-remove-x509-certificates.sh b/integration_tests/cli/pki-remove-x509-certificates.sh index f7024e38d..5917cd792 100755 --- a/integration_tests/cli/pki-remove-x509-certificates.sh +++ b/integration_tests/cli/pki-remove-x509-certificates.sh @@ -34,20 +34,25 @@ create_new_vendor_account $vendor_account_65522 65522 echo "Propose and approve root certificate 1" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$root_cert_1_path" --vid "$root_cert_vid" --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" result=$(echo "$passphrase" | dcld tx pki approve-add-x509-root-cert --subject="$root_cert_subject" 
--subject-key-id="$root_cert_subject_key_id" --from $second_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add an intermediate certificate with serialNumber 3" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$intermediate_cert_1_path" --from $vendor_account_65521 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add an intermediate certificate with serialNumber 4" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$intermediate_cert_2_path" --from $vendor_account_65521 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add a leaf certificate with serialNumber 5" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$leaf_cert_path" --from $vendor_account_65521 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all approved root certificates." @@ -65,6 +70,7 @@ check_response "$result" "\"serialNumber\": \"$leaf_cert_serial_number\"" echo "Revoke an intermediate certificate with serialNumber 3" result=$(echo "$passphrase" | dcld tx pki revoke-x509-cert --subject="$intermediate_cert_subject" --subject-key-id="$intermediate_cert_subject_key_id" --serial-number="$intermediate_cert_1_serial_number" --from=$vendor_account_65521 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all revoked certificates should contain only one intermediate certificate with serialNumber 3" @@ -77,18 +83,22 @@ response_does_not_contain "$result" "\"serialNumber\": \"$intermediate_cert_2_se echo "Remove intermediate certificate with invalid serialNumber" result=$(echo "$passphrase" | dcld tx pki remove-x509-cert --subject="$intermediate_cert_subject" --subject-key-id="$intermediate_cert_subject_key_id" --serial-number="invalid" --from=$vendor_account_65521 --yes) +result=$(get_txn_result "$result") check_response 
"$result" "\"code\": 404" echo "Try to remove the intermediate certificate when sender is not Vendor account" result=$(echo "$passphrase" | dcld tx pki remove-x509-cert --subject="$intermediate_cert_subject" --subject-key-id="$intermediate_cert_subject_key_id" --serial-number="$intermediate_cert_1_serial_number" --from=$trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 4" echo "Try to remove the intermediate certificate using a vendor account with other VID" result=$(echo "$passphrase" | dcld tx pki remove-x509-cert --subject="$intermediate_cert_subject" --subject-key-id="$intermediate_cert_subject_key_id" --serial-number="$intermediate_cert_1_serial_number" --from=$vendor_account_65522 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 4" echo "Remove intermediate certificate with serialNumber 3" result=$(echo "$passphrase" | dcld tx pki remove-x509-cert --subject="$intermediate_cert_subject" --subject-key-id="$intermediate_cert_subject_key_id" --serial-number="$intermediate_cert_1_serial_number" --from=$vendor_account_65521 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all certificates should not contain intermediate certificate with serialNumber 3" @@ -114,6 +124,7 @@ response_does_not_contain "$result" "\"serialNumber\": \"$intermediate_cert_1_se echo "Remove an intermediate certificate with subject and subjectKeyId" result=$(echo "$passphrase" | dcld tx pki remove-x509-cert --subject="$intermediate_cert_subject" --subject-key-id="$intermediate_cert_subject_key_id" --from=$vendor_account_65521 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request approved certificates by an intermediate certificate's subject and subjectKeyId should be empty" @@ -147,6 +158,7 @@ response_does_not_contain "$result" "\"serialNumber\": \"$intermediate_cert_1_se echo "Remove leaf certificate" result=$(echo "$passphrase" 
| dcld tx pki remove-x509-cert --subject="$leaf_cert_subject" --subject-key-id="$leaf_cert_subject_key_id" --from=$vendor_account_65521 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request approved leaf certificates should be empty" diff --git a/integration_tests/cli/pki-revocation-points.sh b/integration_tests/cli/pki-revocation-points.sh index b7a139b5f..ed37d7ecc 100755 --- a/integration_tests/cli/pki-revocation-points.sh +++ b/integration_tests/cli/pki-revocation-points.sh @@ -103,6 +103,7 @@ test_divider echo "4. ADD REVOCATION POINT NOT BY VENDOR" result=$(dcld tx pki add-revocation-point --vid=$vid --is-paa="true" --certificate="$paa_cert_with_numeric_vid_path" --label="$label" --data-url="$data_url" --issuer-subject-key-id=$issuer_subject_key_id --revocation-type=1 --from=$trustee_account --yes) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" echo $result @@ -112,6 +113,7 @@ test_divider echo "5. ADD REVOCATION POINT FOR PAA WHEN SENDER VID IS NOT EQUAL VID FIELD" result=$(dcld tx pki add-revocation-point --vid=$vid --is-paa="true" --certificate="$paa_cert_with_numeric_vid_path" --label="$label" --data-url="$data_url" --issuer-subject-key-id=$issuer_subject_key_id --revocation-type=1 --from=$vendor_account_65522 --yes) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" echo $result @@ -121,6 +123,7 @@ test_divider echo "6. 
ADD REVOCATION POINT FOR PAA WHEN CERTIFICATE DOES NOT EXIST" result=$(dcld tx pki add-revocation-point --vid=$vid --is-paa="true" --certificate="$paa_cert_with_numeric_vid_path" --label="$label" --data-url="$data_url" --issuer-subject-key-id=$issuer_subject_key_id --revocation-type=1 --from=$vendor_account --yes) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" echo $result @@ -129,26 +132,34 @@ test_divider echo "Trustees add PAA cert with numeric vid to the ledger" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$paa_cert_with_numeric_vid_path" --vid $vid --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" result=$(echo "$passphrase" | dcld tx pki approve-add-x509-root-cert --subject="$paa_cert_with_numeric_vid_subject" --subject-key-id="$paa_cert_with_numeric_vid_subject_key_id" --from $second_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Trustees add PAA no VID" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$paa_cert_no_vid_path" --vid $vid_non_vid_scoped --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" result=$(echo "$passphrase" | dcld tx pki approve-add-x509-root-cert --subject="$paa_cert_no_vid_subject" --subject-key-id="$paa_cert_no_vid_subject_key_id" --from $second_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Trustees add root cert" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$root_cert_path" --vid $vid --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" result=$(echo "$passphrase" | dcld tx pki approve-add-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --from $second_trustee_account 
--yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Trustees add test root cert" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$test_root_cert_path" --vid $vid_non_vid_scoped --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" result=$(echo "$passphrase" | dcld tx pki approve-add-x509-root-cert --subject="$test_root_cert_subject" --subject-key-id="$test_root_cert_subject_key_id" --from $second_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Trustees add VID scoped root cert" @@ -162,6 +173,7 @@ test_divider echo "7. ADD REVOCATION POINT FOR PAA WHEN CRL SIGNER CERTIFICATE PEM VALUE IS NOT EQUAL TO STORED CERTIFICATE PEM VALUE" result=$(dcld tx pki add-revocation-point --vid=$vid_65522 --is-paa="true" --certificate="$paa_cert_with_numeric_vid1_path" --label="$label" --data-url="$data_url" --issuer-subject-key-id=$issuer_subject_key_id --revocation-type=1 --from=$vendor_account_65522 --yes) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" echo $result @@ -170,6 +182,7 @@ test_divider echo "8. ADD REVOCATION POINT FOR PAI NOT BY VENDOR" result=$(dcld tx pki add-revocation-point --vid=$vid --pid=$pid --is-paa="false" --certificate="$pai_cert_with_numeric_vid_pid_path" --label="$label" --data-url="$data_url" --issuer-subject-key-id=$issuer_subject_key_id --revocation-type=1 --from=$trustee_account --yes) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" echo $result @@ -180,6 +193,7 @@ echo "9. 
ADD REVOCATION POINT FOR VID-SCOPED PAA" schema_version_2=2 result=$(dcld tx pki add-revocation-point --vid=$vid --is-paa="true" --certificate="$paa_cert_with_numeric_vid_path" --label="$label" --data-url="$data_url" --issuer-subject-key-id=$issuer_subject_key_id --revocation-type=1 --schemaVersion=$schema_version_2 --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo $result @@ -201,11 +215,13 @@ check_response "$result" "\"issuerSubjectKeyID\": \"$issuer_subject_key_id\"" echo "Can not add the same REVOCATION POINT second time: (vid, issuer, label) - key already exist" result=$(dcld tx pki add-revocation-point --vid=$vid --is-paa="true" --certificate="$paa_cert_with_numeric_vid_path" --label="$label" --data-url="$data_url-new" --issuer-subject-key-id=$issuer_subject_key_id --revocation-type=1 --from=$vendor_account --yes) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" echo $result echo "Can not add the same REVOCATION POINT second time: (vid, issuer, dataURL) - key already exist" result=$(dcld tx pki add-revocation-point --vid=$vid --is-paa="true" --certificate="$paa_cert_with_numeric_vid_path" --label="$label-new" --data-url="$data_url" --issuer-subject-key-id=$issuer_subject_key_id --revocation-type=1 --from=$vendor_account --yes) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" echo $result @@ -214,6 +230,7 @@ test_divider echo "10. ADD REVOCATION POINT FOR NON-VID-SCOPED PAA" result=$(dcld tx pki add-revocation-point --vid=$vid_non_vid_scoped --is-paa="true" --certificate="$paa_cert_no_vid_path" --label="$label_non_vid_scoped" --data-url="$data_url_non_vid_scoped" --issuer-subject-key-id=$issuer_subject_key_id --revocation-type=1 --from=$vendor_account_non_vid_scoped --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo $result @@ -243,6 +260,7 @@ test_divider echo "11. 
ADD REVOCATION POINT FOR PAI" result=$(dcld tx pki add-revocation-point --vid=$vid_65522 --is-paa="false" --certificate="$pai_cert_with_numeric_vid_path" --label="$label_pai" --data-url="$data_url" --issuer-subject-key-id=$issuer_subject_key_id --revocation-type=1 --from=$vendor_account_65522 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo $result @@ -278,6 +296,7 @@ test_divider echo "12. ADD REVOCATION POINT FOR CRL SIGNER CERTIFICATE DELEGATED BY PAI" result=$(dcld tx pki add-revocation-point --vid=$vid --is-paa="false" --certificate="$crl_signer_delegated_by_pai_1" --label="$label_leaf_with_delegator" --data-url="$data_url" --issuer-subject-key-id=$delegator_cert_with_vid_subject_key_id --revocation-type=1 --certificate-delegator="$delegator_cert_with_vid_65521_path" --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" result=$(dcld query pki revocation-point --vid=$vid --label=$label_leaf_with_delegator --issuer-subject-key-id=$delegator_cert_with_vid_subject_key_id) @@ -293,10 +312,12 @@ echo "13. 
ADD REVOCATION POINT FOR CRL SIGNER CERTIFICATE DELEGATED BY PAA" echo "Add PAI certificate" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$delegator_cert_with_vid_65521_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add PKI revocation point with IS_PAA=true" result=$(dcld tx pki add-revocation-point --vid=$vid_65522 --is-paa="true" --certificate="$crl_signer_delegated_by_paa" --label="$label_leaf" --data-url="$data_url" --issuer-subject-key-id=$delegator_cert_with_vid_subject_key_id --revocation-type=1 --from=$vendor_account_65522 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" result=$(dcld query pki revocation-point --vid=$vid_65522 --label=$label_leaf --issuer-subject-key-id=$delegator_cert_with_vid_subject_key_id) @@ -310,6 +331,7 @@ test_divider echo "14. UPDATE REVOCATION POINT FOR CRL SIGNER CERTIFICATE DELEGATED BY PAI" data_url_new="$data_url"_new result=$(dcld tx pki update-revocation-point --vid=$vid --certificate="$crl_signer_delegated_by_pai_1" --label="$label_leaf_with_delegator" --data-url="$data_url_new" --issuer-subject-key-id=$delegator_cert_with_vid_subject_key_id --certificate-delegator="$delegator_cert_with_vid_65521_copy_path" --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo $result @@ -326,6 +348,7 @@ test_divider echo "15. UPDATE REVOCATION POINT FOR CRL SIGNER CERTIFICATE DELEGATED BY PAA" result=$(dcld tx pki update-revocation-point --vid=$vid_65522 --certificate="$crl_signer_delegated_by_pai_2" --label="$label_leaf" --data-url="$data_url_new" --issuer-subject-key-id=$delegator_cert_with_vid_subject_key_id --from=$vendor_account_65522 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo $result @@ -343,6 +366,7 @@ test_divider echo "16. 
UPDATE REVOCATION POINT WHEN POINT NOT FOUND" result=$(dcld tx pki update-revocation-point --vid=$vid_65522 --certificate="$pai_cert_with_numeric_vid_pid_path" --label="$label" --data-url="$data_url" --issuer-subject-key-id=$issuer_subject_key_id --from=$vendor_account_65522 --yes) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" echo $result @@ -351,6 +375,7 @@ test_divider echo "17. UPDATE REVOCATION POINT FOR PAA WHEN NEW CERT IS NOT PAA" result=$(dcld tx pki update-revocation-point --vid=$vid --certificate="$pai_cert_with_numeric_vid_pid_path" --label="$label" --data-url="$data_url" --issuer-subject-key-id=$issuer_subject_key_id --from=$vendor_account --yes) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" echo $result @@ -359,6 +384,7 @@ test_divider echo "18. UPDATE REVOCATION POINT WHEN SENDER IS NOT VENDOR" result=$(dcld tx pki update-revocation-point --vid=$vid --certificate="$paa_cert_with_numeric_vid_path" --label="$label" --data-url="$data_url" --issuer-subject-key-id=$issuer_subject_key_id --from=$trustee_account --yes) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" echo $result @@ -367,6 +393,7 @@ test_divider echo "19. UPDATE REVOCATION POINT FOR PAA WHEN SENDER VID IS NOT EQUAL TO CERT VID" result=$(dcld tx pki update-revocation-point --vid=$vid --certificate="$paa_cert_with_numeric_vid_path" --label="$label" --data-url="$data_url" --issuer-subject-key-id=$issuer_subject_key_id --from=$vendor_account_65522 --yes) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" echo $result @@ -375,6 +402,7 @@ test_divider echo "20. 
UPDATE REVOCATION POINT FOR PAA WHEN MSG VID IS NOT EQUAL TO CERT VID" result=$(dcld tx pki update-revocation-point --vid=$vid_65522 --certificate="$paa_cert_with_numeric_vid_path" --label="$label" --data-url="$data_url" --issuer-subject-key-id=$issuer_subject_key_id --from=$vendor_account --yes) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" echo $result @@ -383,6 +411,7 @@ test_divider echo "21. UPDATE REVOCATION POINT FOR VID-SCOPED PAA" schema_version_3=3 result=$(dcld tx pki update-revocation-point --vid=$vid --certificate="$root_cert_path" --label="$label" --data-url="$data_url" --issuer-subject-key-id=$issuer_subject_key_id --schemaVersion=$schema_version_3 --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo $result @@ -397,6 +426,7 @@ test_divider echo "22. UPDATE REVOCATION POINT FOR NON-VID SCOPED PAA" result=$(dcld tx pki update-revocation-point --vid=$vid_non_vid_scoped --certificate="$test_root_cert_path" --label="$label_non_vid_scoped" --data-url="$data_url_non_vid_scoped" --issuer-subject-key-id=$issuer_subject_key_id --from=$vendor_account_non_vid_scoped --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo $result @@ -411,6 +441,7 @@ test_divider echo "23. UPDATE REVOCATION POINT FOR PAI" result=$(dcld tx pki update-revocation-point --vid=$vid_65522 --certificate="$pai_cert_vid_path" --label="$label_pai" --data-url="$data_url" --issuer-subject-key-id=$issuer_subject_key_id --from=$vendor_account_65522 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo $result @@ -425,6 +456,7 @@ test_divider echo "24. 
DELETE REVOCATION POINT" result=$(dcld tx pki delete-revocation-point --vid=$vid --label="$label" --issuer-subject-key-id=$issuer_subject_key_id --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo $result diff --git a/integration_tests/cli/pki-revocation-with-revoking-child.sh b/integration_tests/cli/pki-revocation-with-revoking-child.sh index 5a816b200..cb19b1db6 100755 --- a/integration_tests/cli/pki-revocation-with-revoking-child.sh +++ b/integration_tests/cli/pki-revocation-with-revoking-child.sh @@ -32,26 +32,32 @@ create_new_vendor_account $vendor_account $root_cert_vid echo "Propose and approve root certificate 1" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$root_cert_1_path" --vid "$root_cert_vid" --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" result=$(echo "$passphrase" | dcld tx pki approve-add-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --from $second_trustee_account --yes) check_response "$result" "\"code\": 0" echo "Propose and approve root certificate 2" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$root_cert_2_path" --vid "$root_cert_vid" --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" result=$(echo "$passphrase" | dcld tx pki approve-add-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --from $second_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add an intermediate certificate with serialNumber 3" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$intermediate_cert_1_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add an intermediate certificate with serialNumber 4" 
result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$intermediate_cert_2_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add a leaf certificate with serialNumber 5" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$leaf_cert_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all approved root certificates." @@ -65,6 +71,7 @@ check_response "$result" "\"subjectKeyId\": \"$leaf_cert_subject_key_id\"" echo "Revoke intermediate certificates and its child certificates too" result=$(echo "$passphrase" | dcld tx pki revoke-x509-cert --subject="$intermediate_cert_subject" --subject-key-id="$intermediate_cert_subject_key_id" --revoke-child=true --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all revoked certificates should contain two intermediate and one leaf certificates" @@ -93,28 +100,35 @@ response_does_not_contain "$result" "\"serialNumber\": \"$leaf_cert_serial_numbe echo "Remove intermediate and leaf certificates to re-add them again" result=$(echo "$passphrase" | dcld tx pki remove-x509-cert --subject="$intermediate_cert_subject" --subject-key-id="$intermediate_cert_subject_key_id" --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" result=$(echo "$passphrase" | dcld tx pki remove-x509-cert --subject="$leaf_cert_subject" --subject-key-id="$leaf_cert_subject_key_id" --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add an intermediate certificate with serialNumber 3" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$intermediate_cert_1_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add an intermediate certificate 
with serialNumber 4" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$intermediate_cert_2_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add a leaf certificate with serialNumber 5" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$leaf_cert_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$trustee_account (Trustee) proposes to revoke Root certificates and its child certificates too" result=$(echo "$passphrase" | dcld tx pki propose-revoke-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --revoke-child=true --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$second_trustee_account (Second Trustee) approves to revoke Root certificate" result=$(echo "$passphrase" | dcld tx pki approve-revoke-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --from $second_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all revoked certificates should contain two root, one intermediate and one leaf certificates" diff --git a/integration_tests/cli/pki-revocation-with-serial-number.sh b/integration_tests/cli/pki-revocation-with-serial-number.sh index 243666ece..63372bcef 100755 --- a/integration_tests/cli/pki-revocation-with-serial-number.sh +++ b/integration_tests/cli/pki-revocation-with-serial-number.sh @@ -32,26 +32,33 @@ create_new_vendor_account $vendor_account $root_cert_vid echo "Propose and approve root certificate 1" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$root_cert_1_path" --vid "$root_cert_vid" --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" result=$(echo "$passphrase" | dcld tx pki 
approve-add-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --from $second_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Propose and approve root certificate 2" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$root_cert_2_path" --vid "$root_cert_vid" --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" result=$(echo "$passphrase" | dcld tx pki approve-add-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --from $second_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add an intermediate certificate with serialNumber 3" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$intermediate_cert_1_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add an intermediate certificate with serialNumber 4" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$intermediate_cert_2_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add a leaf certificate with serialNumber 5" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$leaf_cert_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all approved root certificates." 
@@ -70,10 +77,12 @@ check_response "$result" "\"serialNumber\": \"$leaf_cert_serial_number\"" echo "Revoke intermediate certificate with invalid serialNumber" result=$(echo "$passphrase" | dcld tx pki revoke-x509-cert --subject="$intermediate_cert_subject" --subject-key-id="$intermediate_cert_subject_key_id" --serial-number="invalid" --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 404" echo "Revoke intermediate certificate with serialNumber 3 only(child certificates should not be removed)" result=$(echo "$passphrase" | dcld tx pki revoke-x509-cert --subject="$intermediate_cert_subject" --subject-key-id="$intermediate_cert_subject_key_id" --serial-number="$intermediate_cert_1_serial_number" --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all revoked certificates should contain one intermediate certificate with serialNumber 3" @@ -102,6 +111,7 @@ check_response "$result" "\"serialNumber\": \"$leaf_cert_serial_number\"" echo "Revoke intermediate certificate with serialNumber 4 and its child certificates too" result=$(echo "$passphrase" | dcld tx pki revoke-x509-cert --subject="$intermediate_cert_subject" --subject-key-id="$intermediate_cert_subject_key_id" --serial-number="$intermediate_cert_2_serial_number" --revoke-child=true --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all revoked certificates should contain two intermediate and one leaf certificates" @@ -130,20 +140,25 @@ response_does_not_contain "$result" "\"serialNumber\": \"$leaf_cert_serial_numbe echo "Remove intermediate and leaf certificates to re-add them again" result=$(echo "$passphrase" | dcld tx pki remove-x509-cert --subject="$intermediate_cert_subject" --subject-key-id="$intermediate_cert_subject_key_id" --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 
0" result=$(echo "$passphrase" | dcld tx pki remove-x509-cert --subject="$leaf_cert_subject" --subject-key-id="$leaf_cert_subject_key_id" --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add an intermediate certificate with serialNumber 3" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$intermediate_cert_1_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add an intermediate certificate with serialNumber 4" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$intermediate_cert_2_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add a leaf certificate with serialNumber 5" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$leaf_cert_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all approved root certificates." 
@@ -157,14 +172,17 @@ check_response "$result" "\"subjectKeyId\": \"$leaf_cert_subject_key_id\"" echo "$trustee_account (Trustee) proposes to revoke Root certificate with invalid serialNumber" result=$(echo "$passphrase" | dcld tx pki propose-revoke-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --serial-number="invalid" --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 404" echo "$trustee_account (Trustee) proposes to revoke Root certificate with serialNumber 1 only(child certificates should not be removed)" result=$(echo "$passphrase" | dcld tx pki propose-revoke-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --serial-number="$root_cert_1_serial_number" --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$second_trustee_account (Second Trustee) approves to revoke Root certificate with serialNumber 1 only(child certificates should not be removed)" result=$(echo "$passphrase" | dcld tx pki approve-revoke-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --serial-number="$root_cert_1_serial_number" --from $second_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all revoked certificates should contain one root certificate with serialNumber 1" @@ -194,10 +212,12 @@ response_does_not_contain "$result" "\"serialNumber\": \"$root_cert_1_serial_num echo "$trustee_account (Trustee) proposes to revoke Root certificate with serialNumber 2 and its child certificates too" result=$(echo "$passphrase" | dcld tx pki propose-revoke-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --serial-number="$root_cert_2_serial_number" --revoke-child=true --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" 
"\"code\": 0" echo "$second_trustee_account (Second Trustee) approves to revoke Root certificate with serialNumber 2" result=$(echo "$passphrase" | dcld tx pki approve-revoke-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --serial-number="$root_cert_2_serial_number" --from $second_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Request all revoked certificates should contain two root, one intermediate and one leaf certificates" diff --git a/integration_tests/cli/upgrade-demo.sh b/integration_tests/cli/upgrade-demo.sh index f4e412b77..7008523eb 100755 --- a/integration_tests/cli/upgrade-demo.sh +++ b/integration_tests/cli/upgrade-demo.sh @@ -24,6 +24,7 @@ echo "Create Trustee account" create_new_account trustee_account "Trustee" random_string upgrade_name propose=$(dcld tx dclupgrade propose-upgrade --name=$upgrade_name --upgrade-height=$upgrade_height --upgrade-info=$upgrade_info --from $trustee_account --yes) +propose=$(get_txn_result "$propose") echo "propose upgrade response: $propose" check_response "$propose" "\"code\": 0" @@ -34,14 +35,17 @@ check_response_and_report "$proposed_dclupgrade_query" "\"height\": \"$upgrade_h check_response_and_report "$proposed_dclupgrade_query" "\"info\": \"$upgrade_info\"" approve=$(dcld tx dclupgrade approve-upgrade --name=$upgrade_name --from alice --yes) +approve=$(get_txn_result "$approve") echo "approve upgrade response: $approve" check_response "$approve" "\"code\": 0" reject=$(dcld tx dclupgrade reject-upgrade --name=$upgrade_name --from alice --yes) +reject=$(get_txn_result "$reject") echo "reject upgrade response: $reject" check_response "$reject" "\"code\": 0" approve=$(dcld tx dclupgrade approve-upgrade --name=$upgrade_name --from alice --yes) +approve=$(get_txn_result "$approve") echo "approve upgrade response: $approve" check_response "$approve" "\"code\": 0" @@ -59,6 +63,7 @@ plan_query=$(dcld query upgrade plan 
2>&1) || true check_response "$plan_query" "no upgrade scheduled" raw approve=$(dcld tx dclupgrade approve-upgrade --name=$upgrade_name --from bob --yes) +approve=$(get_txn_result "$approve") echo "approve upgrade response: $approve" check_response "$approve" "\"code\": 0" @@ -85,10 +90,12 @@ test_divider echo "proposer cannot approve upgrade" random_string upgrade_name propose=$(dcld tx dclupgrade propose-upgrade --name=$upgrade_name --upgrade-height=$upgrade_height --upgrade-info=$upgrade_info --from alice --yes) +propose=$(get_txn_result "$propose") echo "propose upgrade response: $propose" check_response "$propose" "\"code\": 0" approve=$(dcld tx dclupgrade approve-upgrade --name=$upgrade_name --from alice --yes) +approve=$(get_txn_result "$approve") echo "approve upgrade response: $approve" check_response_and_report "$approve" "unauthorized" raw @@ -99,14 +106,17 @@ test_divider echo "cannot approve upgrade twice" random_string upgrade_name propose=$(dcld tx dclupgrade propose-upgrade --name=$upgrade_name --upgrade-height=$upgrade_height --upgrade-info=$upgrade_info --from alice --yes) +propose=$(get_txn_result "$propose") echo "propose upgrade response: $propose" check_response "$propose" "\"code\": 0" approve=$(dcld tx dclupgrade approve-upgrade --name=$upgrade_name --from bob --yes) +approve=$(get_txn_result "$approve") echo "approve upgrade response: $approve" check_response "$approve" "\"code\": 0" second_approve=$(dcld tx dclupgrade approve-upgrade --name=$upgrade_name --from bob --yes) +second_approve=$(get_txn_result "$second_approve") echo "second approve upgrade response: $second_approve" check_response_and_report "$second_approve" "unauthorized" raw @@ -116,10 +126,12 @@ test_divider echo "cannot propose upgrade twice" random_string upgrade_name propose=$(dcld tx dclupgrade propose-upgrade --name=$upgrade_name --upgrade-height=$upgrade_height --upgrade-info=$upgrade_info --from alice --yes) +propose=$(get_txn_result "$propose") echo "propose upgrade 
response: $propose" check_response "$propose" "\"code\": 0" second_propose=$(dcld tx dclupgrade propose-upgrade --name=$upgrade_name --upgrade-height=$upgrade_height --upgrade-info=$upgrade_info --from alice --yes) +second_propose=$(get_txn_result "$second_propose") echo "second propose upgrade response: $second_propose" check_response_and_report "$second_propose" "proposed upgrade already exists" raw @@ -131,6 +143,7 @@ echo "upgrade height < current height" random_string upgrade_name height=1 propose=$(dcld tx dclupgrade propose-upgrade --name=$upgrade_name --upgrade-height=$height --upgrade-info=$upgrade_info --from alice --yes) +propose=$(get_txn_result "$propose") echo "propose upgrade response: $propose" check_response_and_report "$propose" "upgrade cannot be scheduled in the past" raw @@ -143,12 +156,14 @@ random_string upgrade_info echo "propose and reject upgrade" random_string upgrade_name propose=$(dcld tx dclupgrade propose-upgrade --name=$upgrade_name --upgrade-height=$upgrade_height --upgrade-info=$upgrade_info --from $trustee_account --yes) +propose=$(get_txn_result "$propose") echo "propose upgrade response: $propose" check_response "$propose" "\"code\": 0" test_divider approve=$(dcld tx dclupgrade approve-upgrade --name=$upgrade_name --from alice --yes) +approve=$(get_txn_result "$approve") echo "approve upgrade response: $approve" check_response "$approve" "\"code\": 0" @@ -163,24 +178,28 @@ check_response_and_report "$proposed_dclupgrade_query" "\"info\": \"$upgrade_inf test_divider reject=$(dcld tx dclupgrade reject-upgrade --name=$upgrade_name --from $trustee_account --yes) +reject=$(get_txn_result "$reject") echo "reject upgrade response: $reject" check_response "$reject" "\"code\": 0" test_divider approve=$(dcld tx dclupgrade approve-upgrade --name=$upgrade_name --from $trustee_account --yes) +approve=$(get_txn_result "$approve") echo "approve upgrade response: $approve" check_response "$approve" "\"code\": 0" test_divider reject=$(dcld tx 
dclupgrade reject-upgrade --name=$upgrade_name --from alice --yes) +reject=$(get_txn_result "$reject") echo "reject upgrade response: $reject" check_response "$reject" "\"code\": 0" test_divider second_reject=$(dcld tx dclupgrade reject-upgrade --name=$upgrade_name --from alice --yes) +second_reject=$(get_txn_result "$second_reject") echo "second_reject upgrade response: $reject" response_does_not_contain "$second_reject" "\"code\": 0" @@ -215,6 +234,7 @@ check_response "$approved_dclupgrade_query" "Not Found" test_divider reject=$(dcld tx dclupgrade reject-upgrade --name=$upgrade_name --from bob --yes) +reject=$(get_txn_result "$reject") echo "reject upgrade response: $reject" check_response "$reject" "\"code\": 0" @@ -251,6 +271,7 @@ random_string upgrade_info echo "propose upgrade's plan height bigger than block height" propose=$(dcld tx dclupgrade propose-upgrade --name=$upgrade_name --upgrade-height=$plan_height --upgrade-info=$upgrade_info --from jack --yes) +propose=$(get_txn_result "$propose") echo "propose upgrade response: $propose" check_response "$propose" "\"code\": 0" @@ -266,6 +287,7 @@ echo "Current height is $current_height" echo "approve upgrade's plan height less than block height" approve=$(dcld tx dclupgrade approve-upgrade --name=$upgrade_name --from alice --yes) +approve=$(get_txn_result "$approve") echo "approve upgrade response: $approve" check_response_and_report "$approve" "upgrade cannot be scheduled in the past" raw @@ -275,6 +297,7 @@ plan_height=$(expr $current_height + 3) echo "re-propose upgrade's plan height bigger than block height" propose=$(dcld tx dclupgrade propose-upgrade --name=$upgrade_name --upgrade-height=$plan_height --upgrade-info=$upgrade_info --from jack --yes) +propose=$(get_txn_result "$propose") echo "propose upgrade response: $propose" check_response "$propose" "\"code\": 0" @@ -297,6 +320,7 @@ random_string upgrade_info echo "propose upgrade's plan height bigger than block height" propose=$(dcld tx dclupgrade 
propose-upgrade --name=$upgrade_name --upgrade-height=$plan_height --upgrade-info=$upgrade_info --from jack --yes) +propose=$(get_txn_result "$propose") echo "propose upgrade response: $propose" check_response "$propose" "\"code\": 0" @@ -312,11 +336,13 @@ echo "Current height is $current_height" echo "approve upgrade's plan height less than block height" approve=$(dcld tx dclupgrade approve-upgrade --name=$upgrade_name --from alice --yes) +approve=$(get_txn_result "$approve") echo "approve upgrade response: $approve" check_response_and_report "$approve" "upgrade cannot be scheduled in the past" raw echo "re-propose upgrade's plan height bigger than block height" propose=$(dcld tx dclupgrade propose-upgrade --name=$upgrade_name --upgrade-height=$plan_height --upgrade-info=$upgrade_info --from jack --yes) +propose=$(get_txn_result "$propose") echo "propose upgrade response: $propose" check_response_and_report "$propose" "upgrade cannot be scheduled in the past" raw ########################################################################################################################################### @@ -334,10 +360,12 @@ test_divider echo "jack (Trustee) propose upgrade" result=$(dcld tx dclupgrade propose-upgrade --name=$upgrade_name --upgrade-height=$plan_height --upgrade-info=$upgrade_info --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "jack (Trustee) rejects upgrade" result=$(dcld tx dclupgrade reject-upgrade --name=$upgrade_name --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider diff --git a/integration_tests/cli/validator-demo.sh b/integration_tests/cli/validator-demo.sh index 2131ba7e9..9e64eea3f 100755 --- a/integration_tests/cli/validator-demo.sh +++ b/integration_tests/cli/validator-demo.sh @@ -64,7 +64,7 @@ docker exec $container /bin/sh -c " ./dcld config output json && ./dcld config node $node0conn && ./dcld config keyring-backend test && - 
./dcld config broadcast-mode block" + ./dcld config broadcast-mode sync" test_divider @@ -88,8 +88,8 @@ bob_address="$(dcld keys show bob -a)" jack_address="$(dcld keys show jack -a)" echo "Create account for $account and Assign NodeAdmin role" echo $passphrase | dcld tx auth propose-add-account --address="$address" --pubkey="$pubkey" --roles="NodeAdmin" --from jack --yes -echo $passphrase | dcld tx auth approve-add-account --address="$address" --from alice --yes - +result=$(echo $passphrase | dcld tx auth approve-add-account --address="$address" --from alice --yes) +result=$(get_txn_result "$result") test_divider vaddress=$(docker exec $container ./dcld tendermint show-address) @@ -109,6 +109,7 @@ echo "$account Add Node \"$node_name\" to validator set" set -eu; echo test1234 | dcld tx validator add-node --pubkey='$vpubkey' --moniker="$node_name" --from="$account" --yes EOF result="$(docker exec "$container" /bin/sh -c "echo test1234 | ./dcld tx validator add-node --pubkey='$vpubkey' --moniker="$node_name" --from="$account" --yes")" +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -121,7 +122,7 @@ docker exec $container mkdir -p "$DCL_DIR"/cosmovisor/genesis/bin docker exec $container cp -f ./dcld "$DCL_DIR"/cosmovisor/genesis/bin/ echo "$account Start Node \"$node_name\"" -docker exec -d $container cosmovisor start +docker exec -d $container cosmovisor run start sleep 10 @@ -166,6 +167,7 @@ pid=$RANDOM productName="TestingProductLabel" echo "Add Model with VID: $vid PID: $pid" result=$(echo 'test1234' | dcld tx model add-model --vid=$vid --pid=$pid --deviceTypeID=1 --productName=TestProduct --productLabel="$productName" --partNumber=1 --commissioningCustomFlow=0 --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -200,10 +202,12 @@ test_divider # TEST PROPOSE AND REJECT DISABLE VALIDATOR echo "jack (Trustee) propose disable validator" result=$(dcld 
tx validator propose-disable-node --address="$validator_address" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "jack (Trustee) rejects disable validator" result=$(dcld tx validator reject-disable-node --address="$validator_address" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -229,6 +233,7 @@ check_response "$result" "Not Found" echo "node admin disables validator" result=$(docker exec "$container" /bin/sh -c "echo test1234 | dcld tx validator disable-node --from "$account" --yes") +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -236,6 +241,7 @@ test_divider echo "node admin doesn't add a new validator with new pubkey, because node admin already has disabled validator" result="$(docker exec "$container" /bin/sh -c "echo test1234 | ./dcld tx validator add-node --pubkey='$pubkey' --moniker="$node_name" --from="$account" --yes 2>&1 || true")" +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" test_divider @@ -261,6 +267,7 @@ test_divider echo "node admin enables validator" result=$(docker exec "$container" /bin/sh -c "echo test1234 | dcld tx validator enable-node --from "$account" --yes") +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -293,6 +300,7 @@ test_divider echo "Alice proposes to disable validator $address" result=$(dcld tx validator propose-disable-node --address="$validator_address" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -326,6 +334,7 @@ test_divider echo "Bob approves to disable validator $address" result=$(dcld tx validator approve-disable-node --address="$validator_address" --from bob --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -334,12 +343,14 @@ test_divider echo "node admin doesn't 
add a new validator with new pubkey, because node admin already has disabled validator" result="$(docker exec "$container" /bin/sh -c "echo test1234 | ./dcld tx validator add-node --pubkey='$pubkey' --moniker="$node_name" --from="$account" --yes 2>&1 || true")" +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" test_divider echo "node admin enables validator" result=$(docker exec "$container" /bin/sh -c "echo test1234 | dcld tx validator enable-node --from "$account" --yes") +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -379,6 +390,7 @@ test_divider echo "Alice proposes to disable validator $address" result=$(dcld tx validator propose-disable-node --address="$validator_address" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -403,6 +415,7 @@ test_divider echo "Bob approves to disable validator $address" result=$(dcld tx validator approve-disable-node --address="$validator_address" --from bob --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -410,6 +423,7 @@ test_divider echo "Bob cannot reject to disable validator $address, because Bob already rejected to disable validator" result=$(dcld tx validator reject-disable-node --address="$validator_address" --from bob --yes) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" echo "$result" @@ -460,6 +474,7 @@ test_divider echo "node admin enables validator" result=$(docker exec "$container" /bin/sh -c "echo test1234 | dcld tx validator enable-node --from "$account" --yes") +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -485,16 +500,19 @@ test_divider echo "Jack proposes account for $new_trustee1" result=$(echo $passphrase | dcld tx auth propose-add-account --info="Jack is proposing this account" --address="$new_trustee_address1" 
--pubkey="$new_trustee_pubkey1" --roles="Trustee" --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Alice approves account for \"$new_trustee1\"" result=$(echo $passphrase | dcld tx auth approve-add-account --address="$new_trustee_address1" --info="Alice is approving this account" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Alice proposes to disable validator $address" result=$(dcld tx validator propose-disable-node --address="$validator_address" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -502,6 +520,7 @@ test_divider echo "Bob approves to disable validator $address" result=$(dcld tx validator approve-disable-node --address="$validator_address" --from bob --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -509,6 +528,7 @@ test_divider echo "Bob can revote to reject disable validator $address even if Bob already approves to disable validator" result=$(dcld tx validator reject-disable-node --address="$validator_address" --from bob --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -516,19 +536,23 @@ test_divider echo "Bob approves to disable validator $address" result=$(dcld tx validator approve-disable-node --address="$validator_address" --from bob --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" echo "Alice proposes to revoke account for $new_trustee1" result=$(echo $passphrase | dcld tx auth propose-revoke-account --address="$new_trustee_address1" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Bob approves to revoke account for $new_trustee1" result=$(echo $passphrase | dcld tx auth approve-revoke-account --address="$new_trustee_address1" --from bob --yes) +result=$(get_txn_result 
"$result") check_response "$result" "\"code\": 0" echo "Jack approves to revoke account for $new_trustee1" result=$(echo $passphrase | dcld tx auth approve-revoke-account --address="$new_trustee_address1" --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -559,6 +583,7 @@ test_divider echo "Bob rejects to disable validator $address" result=$(dcld tx validator reject-disable-node --address="$validator_address" --from bob --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -566,6 +591,7 @@ test_divider echo "Bob cannot reject to disable validator $address, because Bob already rejected to disable validator" result=$(dcld tx validator reject-disable-node --address="$validator_address" --from bob --yes) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" echo "$result" @@ -609,6 +635,7 @@ test_divider echo "Jack rejects to disable validator $address" result=$(dcld tx validator reject-disable-node --address="$validator_address" --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -616,6 +643,7 @@ test_divider echo "Jack cannot reject to disable validator $address, because Jack already rejected to disable validator" result=$(dcld tx validator reject-disable-node --address="$validator_address" --from jack --yes) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" echo "$result" @@ -650,6 +678,7 @@ echo "$result" echo "Alice proposes to disable validator $address" result=$(dcld tx validator propose-disable-node --address="$validator_address" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -685,6 +714,7 @@ test_divider echo "Alice proposes to revoke NodeAdmin $address" result=$(dcld tx auth propose-revoke-account --address="$address" --from alice --yes) +result=$(get_txn_result 
"$result") check_response "$result" "\"code\": 0" echo "$result" @@ -692,6 +722,7 @@ test_divider echo "Bob approves to revoke NodeAdmin $address" result=$(dcld tx auth approve-revoke-account --address="$address" --from bob --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" diff --git a/integration_tests/cli/vendorinfo-demo-hex.sh b/integration_tests/cli/vendorinfo-demo-hex.sh index e5909f066..4732d816d 100644 --- a/integration_tests/cli/vendorinfo-demo-hex.sh +++ b/integration_tests/cli/vendorinfo-demo-hex.sh @@ -35,6 +35,7 @@ echo "Create VendorInfo Record for VID: $vid_in_hex_format" companyLegalName="XYZ IOT Devices Inc" vendorName="XYZ Devices" result=$(echo "test1234" | dcld tx vendorinfo add-vendor --vid=$vid_in_hex_format --companyLegalName="$companyLegalName" --vendorName="$vendorName" --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -55,6 +56,7 @@ echo "Update vendor info record for VID: $vid_in_hex_format" companyLegalName="ABC Subsidiary Corporation" vendorLandingPageURL="https://www.w3.org/" result=$(echo "test1234" | dcld tx vendorinfo update-vendor --vid=$vid_in_hex_format --companyLegalName="$companyLegalName" --vendorLandingPageURL=$vendorLandingPageURL --vendorName="$vendorName" --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -75,6 +77,7 @@ test_divider vid1_in_hex_format=0xA15 vid1=2581 result=$(echo "test1234" | dcld tx vendorinfo add-vendor --vid=$vid1_in_hex_format --companyLegalName="$companyLegalName" --vendorName="$vendorName" --from=$vendor_account --yes 2>&1) || true +result=$(get_txn_result "$result") echo "$result" check_response_and_report "$result" "transaction should be signed by a vendor account associated with the vendorID $vid1" @@ -82,6 +85,7 @@ test_divider # Update a vendor info record from a vendor account belonging to another 
vendor_account result=$(echo "test1234" | dcld tx vendorinfo update-vendor --vid=$vid_in_hex_format --companyLegalName="$companyLegalName" --vendorName="$vendorName" --from=$second_vendor_account --yes 2>&1) || true +result=$(get_txn_result "$result") echo "$result" check_response_and_report "$result" "transaction should be signed by a vendor account associated with the vendorID $vid" diff --git a/integration_tests/cli/vendorinfo-demo.sh b/integration_tests/cli/vendorinfo-demo.sh index 53d19776f..82a28a40a 100755 --- a/integration_tests/cli/vendorinfo-demo.sh +++ b/integration_tests/cli/vendorinfo-demo.sh @@ -52,6 +52,7 @@ companyLegalName="XYZ IOT Devices Inc" vendorName="XYZ Devices" schema_version_2=2 result=$(echo "test1234" | dcld tx vendorinfo add-vendor --vid=$vid --companyLegalName="$companyLegalName" --vendorName="$vendorName" --schemaVersion=$schema_version_2 --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -80,6 +81,7 @@ test_divider # Update vendor info with empty optional fields echo "Update vendor info record for VID: $vid (with required fields only)" result=$(dcld tx vendorinfo update-vendor --vid=$vid --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -101,6 +103,7 @@ companyLegalName="ABC Subsidiary Corporation" vendorLandingPageURL="https://www.w3.org/" schema_version_3=3 result=$(echo "test1234" | dcld tx vendorinfo update-vendor --vid=$vid --companyLegalName="$companyLegalName" --vendorLandingPageURL=$vendorLandingPageURL --vendorName="$vendorName" --schemaVersion=$schema_version_3 --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -121,6 +124,7 @@ test_divider # Create a vendor info record from a vendor account belonging to another vendor_account vid1=$RANDOM result=$(echo "test1234" | dcld tx vendorinfo add-vendor --vid=$vid1 
--companyLegalName="$companyLegalName" --vendorName="$vendorName" --from=$vendor_account --yes 2>&1) || true +result=$(get_txn_result "$result") echo "$result" check_response_and_report "$result" "transaction should be signed by a vendor account associated with the vendorID $vid1" @@ -128,6 +132,7 @@ test_divider # Update a vendor info record from a vendor account belonging to another vendor_account result=$(echo "test1234" | dcld tx vendorinfo update-vendor --vid=$vid --companyLegalName="$companyLegalName" --vendorName="$vendorName" --from=$second_vendor_account --yes 2>&1) || true +result=$(get_txn_result "$result") echo "$result" check_response_and_report "$result" "transaction should be signed by a vendor account associated with the vendorID $vid" @@ -137,6 +142,7 @@ test_divider echo "Create a vendor info reacord from a vendor admin account" vid=$RANDOM result=$(echo "test1234" | dcld tx vendorinfo add-vendor --vid=$vid --companyLegalName="$companyLegalName" --vendorName="$vendorName" --from=$vendor_admin_account --yes 2>&1) || true +result=$(get_txn_result "$result") echo "$result" check_response "$result" "\"code\": 0" echo "$result" @@ -148,6 +154,7 @@ echo "Update the vendor info record by a vendor admin account" companyLegalName1="New Corp" vendorName1="New Vendor Name" result=$(echo "test1234" | dcld tx vendorinfo update-vendor --vid=$vid --companyLegalName="$companyLegalName1" --vendorName="$vendorName1" --from=$vendor_admin_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" diff --git a/integration_tests/constants/constants.go b/integration_tests/constants/constants.go index a395a9d29..a2daf0fb8 100644 --- a/integration_tests/constants/constants.go +++ b/integration_tests/constants/constants.go @@ -17,8 +17,9 @@ package testconstants import ( "github.com/cosmos/cosmos-sdk/codec" cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" - "github.com/cosmos/cosmos-sdk/simapp" sdk 
"github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/module/testutil" + "github.com/zigbee-alliance/distributed-compliance-ledger/x/common/types" ) @@ -34,7 +35,7 @@ func strToPubKey(pkStr string, cdc codec.Codec) cryptotypes.PubKey { var ( // default context // TODO issue 99: design test context better. - defEncConfig = simapp.MakeTestEncodingConfig() + defEncConfig = testutil.MakeTestEncodingConfig() // Base constants. JackAccount = "jack" @@ -146,28 +147,28 @@ var ( ProductIDs200 = append([]*types.Uint16Range{}, &types.Uint16Range{Min: 1, Max: 100}, &types.Uint16Range{Min: 101, Max: 200}) PubKey1 = strToPubKey( `{"@type":"/cosmos.crypto.secp256k1.PubKey","key":"Aw1XXHQ8i6JVNKsFQ9eQArJVt2GXEO0EBFsQL6XJ5BxY"}`, - defEncConfig.Marshaler, + defEncConfig.Codec, ) PubKey2 = strToPubKey( `{"@type":"/cosmos.crypto.secp256k1.PubKey","key":"A2wJ7uOEE5Zm04K52czFTXfDj1qF2mholzi1zOJVlKlr"}`, - defEncConfig.Marshaler, + defEncConfig.Codec, ) PubKey3 = strToPubKey( `{"@type":"/cosmos.crypto.secp256k1.PubKey","key":"A0GnKr6hItYE1A7dzoxNSMwMZuu1zauOLWAqJWen1RzF"}`, - defEncConfig.Marshaler, + defEncConfig.Codec, ) PubKey4 = strToPubKey( `{"@type":"/cosmos.crypto.secp256k1.PubKey","key":"AnQC2MkMN1TOQyAJ0zjakPDcak+5FLtEoL4yBsgFO8Xe"}`, - defEncConfig.Marshaler, + defEncConfig.Codec, ) Signer = Address1 ValidatorPubKey1 = strToPubKey( `{"@type":"/cosmos.crypto.ed25519.PubKey","key":"1e+1/jHGaJi0b2zgCN46eelKCYpKiuTgPN18mL3fzx8="}`, - defEncConfig.Marshaler, + defEncConfig.Codec, ) ValidatorPubKey2 = strToPubKey( `{"@type":"/cosmos.crypto.ed25519.PubKey","key":"NB8hcdxKYDCaPWR67OiUXUSltZfYYOWYryPDUdbWRlA="}`, - defEncConfig.Marshaler, + defEncConfig.Codec, ) ValidatorAddress1 = "cosmosvaloper156dzj776tf3lmsahgmtnrphflaqf7n58kug5qe" ValidatorAddress2 = "cosmosvaloper12tg2p3rjsaczddufmsjjrw9nvhg8wkc4hcz3zw" diff --git a/integration_tests/deploy/Dockerfile-build b/integration_tests/deploy/Dockerfile-build index 7f25159f3..bee4dce20 100644 --- 
a/integration_tests/deploy/Dockerfile-build +++ b/integration_tests/deploy/Dockerfile-build @@ -18,7 +18,7 @@ # The resulting image is used only for getting the build artifacts from # a container based on it. Containers based on this image are never run. ################################################################################ -FROM golang:bullseye +FROM golang:1.20.14-bullseye WORKDIR /go/src/dc-ledger COPY app ./app/ @@ -35,4 +35,4 @@ ARG DCL_VERSION ARG DCL_COMMIT RUN LEDGER_ENABLED=false make -RUN go install github.com/cosmos/cosmos-sdk/cosmovisor/cmd/cosmovisor@v1.0.0 +RUN go install github.com/cosmos/cosmos-sdk/cosmovisor/cmd/cosmovisor@v1.3.0 diff --git a/integration_tests/deploy/test_deploy.sh b/integration_tests/deploy/test_deploy.sh index 30e2eb9a4..285d86859 100755 --- a/integration_tests/deploy/test_deploy.sh +++ b/integration_tests/deploy/test_deploy.sh @@ -136,15 +136,18 @@ vn_admin_pubkey="$(docker_exec "$VN_NAME" ./dcld keys show "$vn_admin_name" -p)" # ensure that the genesis node is ready wait_for_height 2 15 normal "tcp://$GVN_IP:26657" -docker_exec "$GVN_NAME" ./dcld tx auth propose-add-account --address "$vn_admin_addr" --pubkey "$vn_admin_pubkey" --roles="NodeAdmin" --from "${GVN_NAME}_tr" --yes +result="$(docker_exec "$GVN_NAME" ./dcld tx auth propose-add-account --address "$vn_admin_addr" --pubkey "$vn_admin_pubkey" --roles="NodeAdmin" --from "${GVN_NAME}_tr" --yes)" +result=$(get_txn_result "$result") #dcld tx auth approve-add-account --address="$vn_admin_addr" --from alice --yes echo "$GVN_NAME: Add Node \"$VN_NAME\" to validator set" # ensure that the validator node is ready wait_for_height 4 30 normal "tcp://$VN_IP:26657" -docker_exec "$VN_NAME" ./dcld tx validator add-node --pubkey="$vn_pubkey" --moniker="$VN_NAME" --from="$vn_admin_name" --yes +result="$(docker_exec "$VN_NAME" ./dcld tx validator add-node --pubkey="$vn_pubkey" --moniker="$VN_NAME" --from="$vn_admin_name" --yes)" +result=$(get_txn_result "$result") +sleep 10 echo 
"Check node \"$VN_NAME\" is in the validator set" result=$(docker_exec "$GVN_NAME" ./dcld query validator all-nodes) check_response "$result" "\"moniker\": \"$VN_NAME\"" diff --git a/integration_tests/grpc_rest/compliance/helpers.go b/integration_tests/grpc_rest/compliance/helpers.go index 225c7f431..33df3f33f 100644 --- a/integration_tests/grpc_rest/compliance/helpers.go +++ b/integration_tests/grpc_rest/compliance/helpers.go @@ -18,15 +18,14 @@ import ( "context" "fmt" + tmrand "github.com/cometbft/cometbft/libs/rand" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" - test_dclauth "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/grpc_rest/dclauth" - test_model "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/grpc_rest/model" + testDclauth "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/grpc_rest/dclauth" + testModel "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/grpc_rest/model" "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/utils" - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" compliancetypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" dclauthtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" ) @@ -39,7 +38,7 @@ import ( TODO: provide tests for error cases */ -func GetAllComplianceInfo(suite *utils.TestSuite) (res []dclcompltypes.ComplianceInfo, err error) { +func GetAllComplianceInfo(suite *utils.TestSuite) (res []compliancetypes.ComplianceInfo, err error) { if suite.Rest { var resp compliancetypes.QueryAllComplianceInfoResponse err := suite.QueryREST("/dcl/compliance/compliance-info", &resp) @@ 
-68,8 +67,8 @@ func GetAllComplianceInfo(suite *utils.TestSuite) (res []dclcompltypes.Complianc func GetComplianceInfo( suite *utils.TestSuite, vid int32, pid int32, sv uint32, ct string, -) (*dclcompltypes.ComplianceInfo, error) { - var res dclcompltypes.ComplianceInfo +) (*compliancetypes.ComplianceInfo, error) { + var res compliancetypes.ComplianceInfo if suite.Rest { var resp compliancetypes.QueryGetComplianceInfoResponse @@ -113,8 +112,8 @@ func GetComplianceInfo( func GetComplianceInfoByHexVidAndPid( suite *utils.TestSuite, vid string, pid string, sv uint32, ct string, -) (*dclcompltypes.ComplianceInfo, error) { - var res dclcompltypes.ComplianceInfo +) (*compliancetypes.ComplianceInfo, error) { + var res compliancetypes.ComplianceInfo if suite.Rest { var resp compliancetypes.QueryGetComplianceInfoResponse @@ -506,19 +505,23 @@ func CDCertificateIDUpdateChangesOnlyOneComplianceInfo(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) jackName := testconstants.JackAccount jackKeyInfo, err := suite.Kr.Key(jackName) require.NoError(suite.T, err) - jackAccount, err := test_dclauth.GetAccount(suite, jackKeyInfo.GetAddress()) + address, err = jackKeyInfo.GetAddress() + require.NoError(suite.T, err) + jackAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // Register new Vendor account vid := int32(tmrand.Uint16()) vendorName := utils.RandString() - vendorAccount := test_dclauth.CreateVendorAccount( + vendorAccount := testDclauth.CreateVendorAccount( suite, vendorName, dclauthtypes.AccountRoles{dclauthtypes.Vendor}, @@ -534,7 +537,7 @@ func CDCertificateIDUpdateChangesOnlyOneComplianceInfo(suite 
*utils.TestSuite) { // Register new CertificationCenter account certCenter := utils.RandString() - certCenterAccount := test_dclauth.CreateAccount( + certCenterAccount := testDclauth.CreateAccount( suite, certCenter, dclauthtypes.AccountRoles{dclauthtypes.CertificationCenter}, @@ -550,21 +553,21 @@ func CDCertificateIDUpdateChangesOnlyOneComplianceInfo(suite *utils.TestSuite) { // Publish model info pid := int32(tmrand.Uint16()) - firstModel := test_model.NewMsgCreateModel(vid, pid, vendorAccount.Address) + firstModel := testModel.NewMsgCreateModel(vid, pid, vendorAccount.Address) _, err = suite.BuildAndBroadcastTx([]sdk.Msg{firstModel}, vendorName, vendorAccount) require.NoError(suite.T, err) // Publish modelVersion svFirst := tmrand.Uint32() svsFirst := utils.RandString() - firstModelVersion := test_model.NewMsgCreateModelVersion(vid, pid, svFirst, svsFirst, vendorAccount.Address) + firstModelVersion := testModel.NewMsgCreateModelVersion(vid, pid, svFirst, svsFirst, vendorAccount.Address) _, err = suite.BuildAndBroadcastTx([]sdk.Msg{firstModelVersion}, vendorName, vendorAccount) require.NoError(suite.T, err) // Publish second modelVersion svSecond := tmrand.Uint32() svsSecond := utils.RandString() - secondModelVersion := test_model.NewMsgCreateModelVersion(vid, pid, svSecond, svsSecond, vendorAccount.Address) + secondModelVersion := testModel.NewMsgCreateModelVersion(vid, pid, svSecond, svsSecond, vendorAccount.Address) _, err = suite.BuildAndBroadcastTx([]sdk.Msg{secondModelVersion}, vendorName, vendorAccount) require.NoError(suite.T, err) @@ -609,17 +612,17 @@ func CDCertificateIDUpdateChangesOnlyOneComplianceInfo(suite *utils.TestSuite) { Vid: vid, Pid: pid, SoftwareVersion: svFirst, - CertificationType: dclcompltypes.ZigbeeCertificationType, + CertificationType: compliancetypes.ZigbeeCertificationType, CDCertificateId: cdCertificateIDNew, CDVersionNumber: "312", } _, err = suite.BuildAndBroadcastTx([]sdk.Msg{&updateComplianceInfoMsg}, certCenter, 
certCenterAccount) require.NoError(suite.T, err) - firstComplianceInfo, err := GetComplianceInfo(suite, vid, pid, svFirst, dclcompltypes.ZigbeeCertificationType) + firstComplianceInfo, err := GetComplianceInfo(suite, vid, pid, svFirst, compliancetypes.ZigbeeCertificationType) require.NoError(suite.T, err) - secondComplianceInfo, err := GetComplianceInfo(suite, vid, pid, svSecond, dclcompltypes.ZigbeeCertificationType) + secondComplianceInfo, err := GetComplianceInfo(suite, vid, pid, svSecond, compliancetypes.ZigbeeCertificationType) require.NoError(suite.T, err) assert.Equal(suite.T, cdCertificateIDNew, firstComplianceInfo.CDCertificateId) @@ -631,19 +634,23 @@ func DeleteComplianceInfoForAllCertStatuses(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) jackName := testconstants.JackAccount jackKeyInfo, err := suite.Kr.Key(jackName) require.NoError(suite.T, err) - jackAccount, err := test_dclauth.GetAccount(suite, jackKeyInfo.GetAddress()) + address, err = jackKeyInfo.GetAddress() + require.NoError(suite.T, err) + jackAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // Register new Vendor account vid := int32(tmrand.Uint16()) vendorName := utils.RandString() - vendorAccount := test_dclauth.CreateVendorAccount( + vendorAccount := testDclauth.CreateVendorAccount( suite, vendorName, dclauthtypes.AccountRoles{dclauthtypes.Vendor}, @@ -659,7 +666,7 @@ func DeleteComplianceInfoForAllCertStatuses(suite *utils.TestSuite) { // Register new CertificationCenter account certCenter := utils.RandString() - certCenterAccount := test_dclauth.CreateAccount( + certCenterAccount := testDclauth.CreateAccount( suite, 
certCenter, dclauthtypes.AccountRoles{dclauthtypes.CertificationCenter}, @@ -675,14 +682,14 @@ func DeleteComplianceInfoForAllCertStatuses(suite *utils.TestSuite) { // Publish model info pid := int32(tmrand.Uint16()) - firstModel := test_model.NewMsgCreateModel(vid, pid, vendorAccount.Address) + firstModel := testModel.NewMsgCreateModel(vid, pid, vendorAccount.Address) _, err = suite.BuildAndBroadcastTx([]sdk.Msg{firstModel}, vendorName, vendorAccount) require.NoError(suite.T, err) // Publish modelVersion sv := tmrand.Uint32() svs := utils.RandString() - firstModelVersion := test_model.NewMsgCreateModelVersion(vid, pid, sv, svs, vendorAccount.Address) + firstModelVersion := testModel.NewMsgCreateModelVersion(vid, pid, sv, svs, vendorAccount.Address) _, err = suite.BuildAndBroadcastTx([]sdk.Msg{firstModelVersion}, vendorName, vendorAccount) require.NoError(suite.T, err) @@ -721,7 +728,7 @@ func DeleteComplianceInfoForAllCertStatuses(suite *utils.TestSuite) { // Publish modelVersion sv = tmrand.Uint32() svs = utils.RandString() - secondModelVersion := test_model.NewMsgCreateModelVersion(vid, pid, sv, svs, vendorAccount.Address) + secondModelVersion := testModel.NewMsgCreateModelVersion(vid, pid, sv, svs, vendorAccount.Address) _, err = suite.BuildAndBroadcastTx([]sdk.Msg{secondModelVersion}, vendorName, vendorAccount) require.NoError(suite.T, err) @@ -759,7 +766,7 @@ func DeleteComplianceInfoForAllCertStatuses(suite *utils.TestSuite) { // Publish modelVersion sv = tmrand.Uint32() svs = utils.RandString() - thirdModelVersion := test_model.NewMsgCreateModelVersion(vid, pid, sv, svs, vendorAccount.Address) + thirdModelVersion := testModel.NewMsgCreateModelVersion(vid, pid, sv, svs, vendorAccount.Address) _, err = suite.BuildAndBroadcastTx([]sdk.Msg{thirdModelVersion}, vendorName, vendorAccount) require.NoError(suite.T, err) @@ -822,19 +829,23 @@ func DemoTrackCompliance(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := 
suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) jackName := testconstants.JackAccount jackKeyInfo, err := suite.Kr.Key(jackName) require.NoError(suite.T, err) - jackAccount, err := test_dclauth.GetAccount(suite, jackKeyInfo.GetAddress()) + address, err = jackKeyInfo.GetAddress() + require.NoError(suite.T, err) + jackAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // Register new Vendor account vid := int32(tmrand.Uint16()) vendorName := utils.RandString() - vendorAccount := test_dclauth.CreateVendorAccount( + vendorAccount := testDclauth.CreateVendorAccount( suite, vendorName, dclauthtypes.AccountRoles{dclauthtypes.Vendor}, @@ -850,7 +861,7 @@ func DemoTrackCompliance(suite *utils.TestSuite) { // Register new CertificationCenter account certCenter := utils.RandString() - certCenterAccount := test_dclauth.CreateAccount( + certCenterAccount := testDclauth.CreateAccount( suite, certCenter, dclauthtypes.AccountRoles{dclauthtypes.CertificationCenter}, @@ -866,25 +877,25 @@ func DemoTrackCompliance(suite *utils.TestSuite) { // Publish model info pid := int32(tmrand.Uint16()) - firstModel := test_model.NewMsgCreateModel(vid, pid, vendorAccount.Address) + firstModel := testModel.NewMsgCreateModel(vid, pid, vendorAccount.Address) _, err = suite.BuildAndBroadcastTx([]sdk.Msg{firstModel}, vendorName, vendorAccount) require.NoError(suite.T, err) // Publish modelVersion sv := tmrand.Uint32() svs := utils.RandString() - firstModelVersion := test_model.NewMsgCreateModelVersion(vid, pid, sv, svs, vendorAccount.Address) + firstModelVersion := testModel.NewMsgCreateModelVersion(vid, pid, sv, svs, vendorAccount.Address) _, err = suite.BuildAndBroadcastTx([]sdk.Msg{firstModelVersion}, vendorName, 
vendorAccount) require.NoError(suite.T, err) // Check if model either certified or revoked before Compliance record was created - _, err = GetComplianceInfo(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + _, err = GetComplianceInfo(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) suite.AssertNotFound(err) - _, err = GetRevokedModel(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + _, err = GetRevokedModel(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) suite.AssertNotFound(err) - _, err = GetCertifiedModel(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + _, err = GetCertifiedModel(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) suite.AssertNotFound(err) - _, err = GetProvisionalModel(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + _, err = GetProvisionalModel(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) suite.AssertNotFound(err) // Certify model @@ -911,8 +922,8 @@ func DemoTrackCompliance(suite *utils.TestSuite) { require.True(suite.T, compliancetypes.ErrAlreadyCertified.Is(err)) // Check model is certified - complianceInfo, _ := GetComplianceInfo(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) - require.Equal(suite.T, dclcompltypes.ZigbeeCertificationType, complianceInfo.CertificationType) + complianceInfo, _ := GetComplianceInfo(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) + require.Equal(suite.T, compliancetypes.ZigbeeCertificationType, complianceInfo.CertificationType) require.Equal(suite.T, uint32(2), complianceInfo.SoftwareVersionCertificationStatus) require.Equal(suite.T, vid, complianceInfo.Vid) require.Equal(suite.T, pid, complianceInfo.Pid) @@ -920,18 +931,18 @@ func DemoTrackCompliance(suite *utils.TestSuite) { require.Equal(suite.T, testconstants.CDCertificateID, complianceInfo.CDCertificateId) require.Equal(suite.T, certReason, complianceInfo.Reason) require.Equal(suite.T, certDate, complianceInfo.Date) - 
modelIsCertified, _ := GetCertifiedModel(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + modelIsCertified, _ := GetCertifiedModel(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) require.True(suite.T, modelIsCertified.Value) - modelIsRevoked, _ := GetRevokedModel(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + modelIsRevoked, _ := GetRevokedModel(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) require.False(suite.T, modelIsRevoked.Value) - modelIsProvisional, _ := GetProvisionalModel(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + modelIsProvisional, _ := GetProvisionalModel(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) require.False(suite.T, modelIsProvisional.Value) // Check device software compliance deviceSoftwareCompliance, _ := GetDeviceSoftwareCompliance(suite, testconstants.CDCertificateID) require.Equal(suite.T, testconstants.CDCertificateID, deviceSoftwareCompliance.CDCertificateId) require.Equal(suite.T, 1, len(deviceSoftwareCompliance.ComplianceInfo)) - require.Equal(suite.T, dclcompltypes.ZigbeeCertificationType, deviceSoftwareCompliance.ComplianceInfo[0].CertificationType) + require.Equal(suite.T, compliancetypes.ZigbeeCertificationType, deviceSoftwareCompliance.ComplianceInfo[0].CertificationType) require.Equal(suite.T, uint32(2), deviceSoftwareCompliance.ComplianceInfo[0].SoftwareVersionCertificationStatus) require.Equal(suite.T, vid, deviceSoftwareCompliance.ComplianceInfo[0].Vid) require.Equal(suite.T, pid, deviceSoftwareCompliance.ComplianceInfo[0].Pid) @@ -952,7 +963,7 @@ func DemoTrackCompliance(suite *utils.TestSuite) { deviceSoftwareCompliances, _ := GetAllDeviceSoftwareCompliance(suite) require.Equal(suite.T, len(inputAllDeviceSoftwareCompliance)+1, len(deviceSoftwareCompliances)) - oldComplianceInfo, _ := GetComplianceInfo(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + oldComplianceInfo, _ := GetComplianceInfo(suite, vid, pid, sv, 
compliancetypes.ZigbeeCertificationType) updateComplianceInfoMsg := compliancetypes.MsgUpdateComplianceInfo{ Creator: certCenterAccount.Address, @@ -961,7 +972,7 @@ func DemoTrackCompliance(suite *utils.TestSuite) { SoftwareVersion: sv, CDCertificateId: testconstants.CDCertificateID, CDVersionNumber: "312", - CertificationType: dclcompltypes.ZigbeeCertificationType, + CertificationType: compliancetypes.ZigbeeCertificationType, ProgramType: "new program type", Reason: "new reason", ParentChild: "child", @@ -969,7 +980,7 @@ func DemoTrackCompliance(suite *utils.TestSuite) { _, err = suite.BuildAndBroadcastTx([]sdk.Msg{&updateComplianceInfoMsg}, certCenter, certCenterAccount) require.NoError(suite.T, err) - updatedComplianceInfo, _ := GetComplianceInfo(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + updatedComplianceInfo, _ := GetComplianceInfo(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) // updated fields require.Equal(suite.T, updatedComplianceInfo.ProgramType, updateComplianceInfoMsg.ProgramType) @@ -1001,19 +1012,19 @@ func DemoTrackCompliance(suite *utils.TestSuite) { require.NoError(suite.T, err) // Check model is revoked - complianceInfo, _ = GetComplianceInfo(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) - require.Equal(suite.T, dclcompltypes.ZigbeeCertificationType, complianceInfo.CertificationType) + complianceInfo, _ = GetComplianceInfo(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) + require.Equal(suite.T, compliancetypes.ZigbeeCertificationType, complianceInfo.CertificationType) require.Equal(suite.T, uint32(3), complianceInfo.SoftwareVersionCertificationStatus) require.Equal(suite.T, vid, complianceInfo.Vid) require.Equal(suite.T, pid, complianceInfo.Pid) require.Equal(suite.T, sv, complianceInfo.SoftwareVersion) require.Equal(suite.T, revocReason, complianceInfo.Reason) require.Equal(suite.T, revocDate, complianceInfo.Date) - modelIsCertified, _ = GetCertifiedModel(suite, vid, pid, sv, 
dclcompltypes.ZigbeeCertificationType) + modelIsCertified, _ = GetCertifiedModel(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) require.False(suite.T, modelIsCertified.Value) - modelIsRevoked, _ = GetRevokedModel(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + modelIsRevoked, _ = GetRevokedModel(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) require.True(suite.T, modelIsRevoked.Value) - modelIsProvisional, _ = GetProvisionalModel(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + modelIsProvisional, _ = GetProvisionalModel(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) require.False(suite.T, modelIsProvisional.Value) // Check modek is revoked from the entity Device Software Compliance @@ -1034,14 +1045,14 @@ func DemoTrackCompliance(suite *utils.TestSuite) { // Publish model info pid = int32(tmrand.Uint16()) - secondModel := test_model.NewMsgCreateModel(vid, pid, vendorAccount.Address) + secondModel := testModel.NewMsgCreateModel(vid, pid, vendorAccount.Address) _, err = suite.BuildAndBroadcastTx([]sdk.Msg{secondModel}, vendorName, vendorAccount) require.NoError(suite.T, err) // Publish model version sv = tmrand.Uint32() svs = utils.RandString() - secondModelVersion := test_model.NewMsgCreateModelVersion(vid, pid, sv, svs, vendorAccount.Address) + secondModelVersion := testModel.NewMsgCreateModelVersion(vid, pid, sv, svs, vendorAccount.Address) _, err = suite.BuildAndBroadcastTx([]sdk.Msg{secondModelVersion}, vendorName, vendorAccount) require.NoError(suite.T, err) @@ -1074,9 +1085,9 @@ func DemoTrackCompliance(suite *utils.TestSuite) { require.NoError(suite.T, err) // Check model is certified - complianceInfo, _ = GetComplianceInfo(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + complianceInfo, _ = GetComplianceInfo(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) - require.Equal(suite.T, dclcompltypes.ZigbeeCertificationType, complianceInfo.CertificationType) + 
require.Equal(suite.T, compliancetypes.ZigbeeCertificationType, complianceInfo.CertificationType) require.Equal(suite.T, uint32(2), complianceInfo.SoftwareVersionCertificationStatus) require.Equal(suite.T, vid, complianceInfo.Vid) require.Equal(suite.T, pid, complianceInfo.Pid) @@ -1095,18 +1106,18 @@ func DemoTrackCompliance(suite *utils.TestSuite) { require.Equal(suite.T, testconstants.ParentChild1, complianceInfo.ParentChild) require.Equal(suite.T, testconstants.CertificationIDOfSoftwareComponent, complianceInfo.CertificationIdOfSoftwareComponent) - modelIsCertified, _ = GetCertifiedModel(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + modelIsCertified, _ = GetCertifiedModel(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) require.True(suite.T, modelIsCertified.Value) - modelIsRevoked, _ = GetRevokedModel(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + modelIsRevoked, _ = GetRevokedModel(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) require.False(suite.T, modelIsRevoked.Value) - modelIsProvisional, _ = GetProvisionalModel(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + modelIsProvisional, _ = GetProvisionalModel(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) require.False(suite.T, modelIsProvisional.Value) // Check Device Software Compliance deviceSoftwareCompliance, _ = GetDeviceSoftwareCompliance(suite, testconstants.CDCertificateID) require.Equal(suite.T, testconstants.CDCertificateID, deviceSoftwareCompliance.CDCertificateId) require.Equal(suite.T, 1, len(deviceSoftwareCompliance.ComplianceInfo)) - require.Equal(suite.T, dclcompltypes.ZigbeeCertificationType, deviceSoftwareCompliance.ComplianceInfo[0].CertificationType) + require.Equal(suite.T, compliancetypes.ZigbeeCertificationType, deviceSoftwareCompliance.ComplianceInfo[0].CertificationType) require.Equal(suite.T, uint32(2), deviceSoftwareCompliance.ComplianceInfo[0].SoftwareVersionCertificationStatus) 
require.Equal(suite.T, vid, deviceSoftwareCompliance.ComplianceInfo[0].Vid) require.Equal(suite.T, pid, deviceSoftwareCompliance.ComplianceInfo[0].Pid) @@ -1151,19 +1162,23 @@ func DemoTrackRevocation(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) jackName := testconstants.JackAccount jackKeyInfo, err := suite.Kr.Key(jackName) require.NoError(suite.T, err) - jackAccount, err := test_dclauth.GetAccount(suite, jackKeyInfo.GetAddress()) + address, err = jackKeyInfo.GetAddress() + require.NoError(suite.T, err) + jackAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // Register new Vendor account vid := int32(tmrand.Uint16()) vendorName := utils.RandString() - vendorAccount := test_dclauth.CreateVendorAccount( + vendorAccount := testDclauth.CreateVendorAccount( suite, vendorName, dclauthtypes.AccountRoles{dclauthtypes.Vendor}, @@ -1179,7 +1194,7 @@ func DemoTrackRevocation(suite *utils.TestSuite) { // Register new CertificationCenter account certCenter := utils.RandString() - certCenterAccount := test_dclauth.CreateAccount( + certCenterAccount := testDclauth.CreateAccount( suite, certCenter, dclauthtypes.AccountRoles{dclauthtypes.CertificationCenter}, @@ -1195,14 +1210,14 @@ func DemoTrackRevocation(suite *utils.TestSuite) { // Publish model info pid := int32(tmrand.Uint16()) - firstModel := test_model.NewMsgCreateModel(vid, pid, vendorAccount.Address) + firstModel := testModel.NewMsgCreateModel(vid, pid, vendorAccount.Address) _, err = suite.BuildAndBroadcastTx([]sdk.Msg{firstModel}, vendorName, vendorAccount) require.NoError(suite.T, err) // Publish modelVersion sv := tmrand.Uint32() svs := 
utils.RandString() - firstModelVersion := test_model.NewMsgCreateModelVersion(vid, pid, sv, svs, vendorAccount.Address) + firstModelVersion := testModel.NewMsgCreateModelVersion(vid, pid, sv, svs, vendorAccount.Address) _, err = suite.BuildAndBroadcastTx([]sdk.Msg{firstModelVersion}, vendorName, vendorAccount) require.NoError(suite.T, err) @@ -1229,19 +1244,19 @@ func DemoTrackRevocation(suite *utils.TestSuite) { require.True(suite.T, compliancetypes.ErrAlreadyRevoked.Is(err)) // Check non-certified model is revoked - complianceInfo, _ := GetComplianceInfo(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) - require.Equal(suite.T, dclcompltypes.ZigbeeCertificationType, complianceInfo.CertificationType) + complianceInfo, _ := GetComplianceInfo(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) + require.Equal(suite.T, compliancetypes.ZigbeeCertificationType, complianceInfo.CertificationType) require.Equal(suite.T, uint32(3), complianceInfo.SoftwareVersionCertificationStatus) require.Equal(suite.T, vid, complianceInfo.Vid) require.Equal(suite.T, pid, complianceInfo.Pid) require.Equal(suite.T, sv, complianceInfo.SoftwareVersion) require.Equal(suite.T, revocReason, complianceInfo.Reason) require.Equal(suite.T, revocDate, complianceInfo.Date) - _, err = GetCertifiedModel(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + _, err = GetCertifiedModel(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) suite.AssertNotFound(err) - modelIsRevoked, _ := GetRevokedModel(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + modelIsRevoked, _ := GetRevokedModel(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) require.True(suite.T, modelIsRevoked.Value) - _, err = GetProvisionalModel(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + _, err = GetProvisionalModel(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) suite.AssertNotFound(err) // Get all @@ -1275,8 +1290,8 @@ func 
DemoTrackRevocation(suite *utils.TestSuite) { require.NoError(suite.T, err) // Check model is certified - complianceInfo, _ = GetComplianceInfo(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) - require.Equal(suite.T, dclcompltypes.ZigbeeCertificationType, complianceInfo.CertificationType) + complianceInfo, _ = GetComplianceInfo(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) + require.Equal(suite.T, compliancetypes.ZigbeeCertificationType, complianceInfo.CertificationType) require.Equal(suite.T, uint32(2), complianceInfo.SoftwareVersionCertificationStatus) require.Equal(suite.T, vid, complianceInfo.Vid) require.Equal(suite.T, pid, complianceInfo.Pid) @@ -1284,18 +1299,18 @@ func DemoTrackRevocation(suite *utils.TestSuite) { require.Equal(suite.T, testconstants.CDCertificateID, complianceInfo.CDCertificateId) require.Equal(suite.T, certReason, complianceInfo.Reason) require.Equal(suite.T, certDate, complianceInfo.Date) - certifiedModel, _ := GetCertifiedModel(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + certifiedModel, _ := GetCertifiedModel(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) require.True(suite.T, certifiedModel.Value) - revokedModel, _ := GetRevokedModel(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + revokedModel, _ := GetRevokedModel(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) require.False(suite.T, revokedModel.Value) - provisionalModel, _ := GetProvisionalModel(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + provisionalModel, _ := GetProvisionalModel(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) require.False(suite.T, provisionalModel.Value) // Check Device Software Compliance deviceSoftwareCompliance, _ := GetDeviceSoftwareCompliance(suite, testconstants.CDCertificateID) require.Equal(suite.T, testconstants.CDCertificateID, deviceSoftwareCompliance.CDCertificateId) require.Equal(suite.T, 2, 
len(deviceSoftwareCompliance.ComplianceInfo)) - require.Equal(suite.T, dclcompltypes.ZigbeeCertificationType, deviceSoftwareCompliance.ComplianceInfo[1].CertificationType) + require.Equal(suite.T, compliancetypes.ZigbeeCertificationType, deviceSoftwareCompliance.ComplianceInfo[1].CertificationType) require.Equal(suite.T, uint32(2), deviceSoftwareCompliance.ComplianceInfo[1].SoftwareVersionCertificationStatus) require.Equal(suite.T, vid, deviceSoftwareCompliance.ComplianceInfo[1].Vid) require.Equal(suite.T, pid, deviceSoftwareCompliance.ComplianceInfo[1].Pid) @@ -1330,19 +1345,23 @@ func DemoTrackProvision(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) jackName := testconstants.JackAccount jackKeyInfo, err := suite.Kr.Key(jackName) require.NoError(suite.T, err) - jackAccount, err := test_dclauth.GetAccount(suite, jackKeyInfo.GetAddress()) + address, err = jackKeyInfo.GetAddress() + require.NoError(suite.T, err) + jackAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // Register new Vendor account vid := int32(tmrand.Uint16()) vendorName := utils.RandString() - vendorAccount := test_dclauth.CreateVendorAccount( + vendorAccount := testDclauth.CreateVendorAccount( suite, vendorName, dclauthtypes.AccountRoles{dclauthtypes.Vendor}, @@ -1358,7 +1377,7 @@ func DemoTrackProvision(suite *utils.TestSuite) { // Register new CertificationCenter account certCenter := utils.RandString() - certCenterAccount := test_dclauth.CreateAccount( + certCenterAccount := testDclauth.CreateAccount( suite, certCenter, dclauthtypes.AccountRoles{dclauthtypes.CertificationCenter}, @@ -1376,12 +1395,12 @@ func 
DemoTrackProvision(suite *utils.TestSuite) { sv := tmrand.Uint32() svs := utils.RandString() - firstModel := test_model.NewMsgCreateModel(vid, pid, vendorAccount.Address) + firstModel := testModel.NewMsgCreateModel(vid, pid, vendorAccount.Address) _, err = suite.BuildAndBroadcastTx([]sdk.Msg{firstModel}, vendorName, vendorAccount) require.NoError(suite.T, err) // Publish modelVersion - firstModelVersion := test_model.NewMsgCreateModelVersion(vid, pid, sv, svs, vendorAccount.Address) + firstModelVersion := testModel.NewMsgCreateModelVersion(vid, pid, sv, svs, vendorAccount.Address) _, err = suite.BuildAndBroadcastTx([]sdk.Msg{firstModelVersion}, vendorName, vendorAccount) require.NoError(suite.T, err) @@ -1408,8 +1427,8 @@ func DemoTrackProvision(suite *utils.TestSuite) { require.True(suite.T, compliancetypes.ErrAlreadyProvisional.Is(err)) // Check non-existent model is provisioned - complianceInfo, _ := GetComplianceInfo(suite, vid, pid, sv, dclcompltypes.MatterCertificationType) - require.Equal(suite.T, dclcompltypes.MatterCertificationType, complianceInfo.CertificationType) + complianceInfo, _ := GetComplianceInfo(suite, vid, pid, sv, compliancetypes.MatterCertificationType) + require.Equal(suite.T, compliancetypes.MatterCertificationType, complianceInfo.CertificationType) require.Equal(suite.T, uint32(1), complianceInfo.SoftwareVersionCertificationStatus) require.Equal(suite.T, vid, complianceInfo.Vid) require.Equal(suite.T, pid, complianceInfo.Pid) @@ -1417,11 +1436,11 @@ func DemoTrackProvision(suite *utils.TestSuite) { require.Equal(suite.T, testconstants.CDCertificateID, complianceInfo.CDCertificateId) require.Equal(suite.T, provReason, complianceInfo.Reason) require.Equal(suite.T, provDate, complianceInfo.Date) - _, err = GetCertifiedModel(suite, vid, pid, sv, dclcompltypes.MatterCertificationType) + _, err = GetCertifiedModel(suite, vid, pid, sv, compliancetypes.MatterCertificationType) suite.AssertNotFound(err) - _, err = GetRevokedModel(suite, vid, pid, 
sv, dclcompltypes.MatterCertificationType) + _, err = GetRevokedModel(suite, vid, pid, sv, compliancetypes.MatterCertificationType) suite.AssertNotFound(err) - provisionModel, _ := GetProvisionalModel(suite, vid, pid, sv, dclcompltypes.MatterCertificationType) + provisionModel, _ := GetProvisionalModel(suite, vid, pid, sv, compliancetypes.MatterCertificationType) require.True(suite.T, provisionModel.Value) // Get all @@ -1454,8 +1473,8 @@ func DemoTrackProvision(suite *utils.TestSuite) { require.NoError(suite.T, err) // Check model is certified - complianceInfo, _ = GetComplianceInfo(suite, vid, pid, sv, dclcompltypes.MatterCertificationType) - require.Equal(suite.T, dclcompltypes.MatterCertificationType, complianceInfo.CertificationType) + complianceInfo, _ = GetComplianceInfo(suite, vid, pid, sv, compliancetypes.MatterCertificationType) + require.Equal(suite.T, compliancetypes.MatterCertificationType, complianceInfo.CertificationType) require.Equal(suite.T, uint32(2), complianceInfo.SoftwareVersionCertificationStatus) require.Equal(suite.T, vid, complianceInfo.Vid) require.Equal(suite.T, pid, complianceInfo.Pid) @@ -1463,18 +1482,18 @@ func DemoTrackProvision(suite *utils.TestSuite) { require.Equal(suite.T, testconstants.CDCertificateID, complianceInfo.CDCertificateId) require.Equal(suite.T, certReason, complianceInfo.Reason) require.Equal(suite.T, certDate, complianceInfo.Date) - certifiedModel, _ := GetCertifiedModel(suite, vid, pid, sv, dclcompltypes.MatterCertificationType) + certifiedModel, _ := GetCertifiedModel(suite, vid, pid, sv, compliancetypes.MatterCertificationType) require.True(suite.T, certifiedModel.Value) - revokedModel, _ := GetRevokedModel(suite, vid, pid, sv, dclcompltypes.MatterCertificationType) + revokedModel, _ := GetRevokedModel(suite, vid, pid, sv, compliancetypes.MatterCertificationType) require.False(suite.T, revokedModel.Value) - provisionalModel, _ := GetProvisionalModel(suite, vid, pid, sv, dclcompltypes.MatterCertificationType) + 
provisionalModel, _ := GetProvisionalModel(suite, vid, pid, sv, compliancetypes.MatterCertificationType) require.False(suite.T, provisionalModel.Value) // Check Device Software Compliance deviceSoftwareCompliance, _ := GetDeviceSoftwareCompliance(suite, testconstants.CDCertificateID) require.Equal(suite.T, testconstants.CDCertificateID, deviceSoftwareCompliance.CDCertificateId) require.Equal(suite.T, 3, len(deviceSoftwareCompliance.ComplianceInfo)) - require.Equal(suite.T, dclcompltypes.MatterCertificationType, deviceSoftwareCompliance.ComplianceInfo[2].CertificationType) + require.Equal(suite.T, compliancetypes.MatterCertificationType, deviceSoftwareCompliance.ComplianceInfo[2].CertificationType) require.Equal(suite.T, uint32(2), deviceSoftwareCompliance.ComplianceInfo[2].SoftwareVersionCertificationStatus) require.Equal(suite.T, vid, deviceSoftwareCompliance.ComplianceInfo[2].Vid) require.Equal(suite.T, pid, deviceSoftwareCompliance.ComplianceInfo[2].Pid) @@ -1504,11 +1523,11 @@ func DemoTrackProvision(suite *utils.TestSuite) { sv = tmrand.Uint32() svs = utils.RandString() - secondModel := test_model.NewMsgCreateModel(vid, pid, vendorAccount.Address) + secondModel := testModel.NewMsgCreateModel(vid, pid, vendorAccount.Address) _, err = suite.BuildAndBroadcastTx([]sdk.Msg{secondModel}, vendorName, vendorAccount) require.NoError(suite.T, err) - secondModelVersion := test_model.NewMsgCreateModelVersion(vid, pid, sv, svs, vendorAccount.Address) + secondModelVersion := testModel.NewMsgCreateModelVersion(vid, pid, sv, svs, vendorAccount.Address) _, err = suite.BuildAndBroadcastTx([]sdk.Msg{secondModelVersion}, vendorName, vendorAccount) require.NoError(suite.T, err) @@ -1541,9 +1560,9 @@ func DemoTrackProvision(suite *utils.TestSuite) { require.NoError(suite.T, err) // Check non-existed model is provisioned - complianceInfo, _ = GetComplianceInfo(suite, vid, pid, sv, dclcompltypes.MatterCertificationType) + complianceInfo, _ = GetComplianceInfo(suite, vid, pid, sv, 
compliancetypes.MatterCertificationType) - require.Equal(suite.T, dclcompltypes.MatterCertificationType, complianceInfo.CertificationType) + require.Equal(suite.T, compliancetypes.MatterCertificationType, complianceInfo.CertificationType) require.Equal(suite.T, uint32(1), complianceInfo.SoftwareVersionCertificationStatus) require.Equal(suite.T, vid, complianceInfo.Vid) require.Equal(suite.T, pid, complianceInfo.Pid) @@ -1562,11 +1581,11 @@ func DemoTrackProvision(suite *utils.TestSuite) { require.Equal(suite.T, testconstants.ParentChild1, complianceInfo.ParentChild) require.Equal(suite.T, testconstants.CertificationIDOfSoftwareComponent, complianceInfo.CertificationIdOfSoftwareComponent) - _, err = GetCertifiedModel(suite, vid, pid, sv, dclcompltypes.MatterCertificationType) + _, err = GetCertifiedModel(suite, vid, pid, sv, compliancetypes.MatterCertificationType) suite.AssertNotFound(err) - _, err = GetRevokedModel(suite, vid, pid, sv, dclcompltypes.MatterCertificationType) + _, err = GetRevokedModel(suite, vid, pid, sv, compliancetypes.MatterCertificationType) suite.AssertNotFound(err) - provisionModel, _ = GetProvisionalModel(suite, vid, pid, sv, dclcompltypes.MatterCertificationType) + provisionModel, _ = GetProvisionalModel(suite, vid, pid, sv, compliancetypes.MatterCertificationType) require.True(suite.T, provisionModel.Value) // Get all @@ -1605,10 +1624,10 @@ func DemoTrackProvision(suite *utils.TestSuite) { require.NoError(suite.T, err) // Check model is certified - complianceInfo, _ = GetComplianceInfo(suite, vid, pid, sv, dclcompltypes.MatterCertificationType) + complianceInfo, _ = GetComplianceInfo(suite, vid, pid, sv, compliancetypes.MatterCertificationType) // After certify model tx some fields will be update and another fields should be no change - require.Equal(suite.T, dclcompltypes.MatterCertificationType, complianceInfo.CertificationType) + require.Equal(suite.T, compliancetypes.MatterCertificationType, complianceInfo.CertificationType) 
require.Equal(suite.T, uint32(2), complianceInfo.SoftwareVersionCertificationStatus) require.Equal(suite.T, vid, complianceInfo.Vid) require.Equal(suite.T, pid, complianceInfo.Pid) @@ -1627,18 +1646,18 @@ func DemoTrackProvision(suite *utils.TestSuite) { require.Equal(suite.T, testconstants.Transport, complianceInfo.Transport) require.Equal(suite.T, testconstants.ParentChild1, complianceInfo.ParentChild) - certifiedModel, _ = GetCertifiedModel(suite, vid, pid, sv, dclcompltypes.MatterCertificationType) + certifiedModel, _ = GetCertifiedModel(suite, vid, pid, sv, compliancetypes.MatterCertificationType) require.True(suite.T, certifiedModel.Value) - revokedModel, _ = GetRevokedModel(suite, vid, pid, sv, dclcompltypes.MatterCertificationType) + revokedModel, _ = GetRevokedModel(suite, vid, pid, sv, compliancetypes.MatterCertificationType) require.False(suite.T, revokedModel.Value) - provisionalModel, _ = GetProvisionalModel(suite, vid, pid, sv, dclcompltypes.MatterCertificationType) + provisionalModel, _ = GetProvisionalModel(suite, vid, pid, sv, compliancetypes.MatterCertificationType) require.False(suite.T, provisionalModel.Value) // Check Device Software Compliance deviceSoftwareCompliance, _ = GetDeviceSoftwareCompliance(suite, testconstants.CDCertificateID) require.Equal(suite.T, 4, len(deviceSoftwareCompliance.ComplianceInfo)) require.Equal(suite.T, testconstants.CDCertificateID, deviceSoftwareCompliance.CDCertificateId) - require.Equal(suite.T, dclcompltypes.MatterCertificationType, deviceSoftwareCompliance.ComplianceInfo[3].CertificationType) + require.Equal(suite.T, compliancetypes.MatterCertificationType, deviceSoftwareCompliance.ComplianceInfo[3].CertificationType) require.Equal(suite.T, uint32(2), deviceSoftwareCompliance.ComplianceInfo[3].SoftwareVersionCertificationStatus) require.Equal(suite.T, vid, deviceSoftwareCompliance.ComplianceInfo[3].Vid) require.Equal(suite.T, pid, deviceSoftwareCompliance.ComplianceInfo[3].Pid) @@ -1675,19 +1694,23 @@ func 
DemoTrackComplianceWithHexVidAndPid(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) jackName := testconstants.JackAccount jackKeyInfo, err := suite.Kr.Key(jackName) require.NoError(suite.T, err) - jackAccount, err := test_dclauth.GetAccount(suite, jackKeyInfo.GetAddress()) + address, err = jackKeyInfo.GetAddress() + require.NoError(suite.T, err) + jackAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // Register new Vendor account var vid int32 = 0xA13 vendorName := utils.RandString() - vendorAccount := test_dclauth.CreateVendorAccount( + vendorAccount := testDclauth.CreateVendorAccount( suite, vendorName, dclauthtypes.AccountRoles{dclauthtypes.Vendor}, @@ -1704,7 +1727,7 @@ func DemoTrackComplianceWithHexVidAndPid(suite *utils.TestSuite) { // Register new CertificationCenter account certCenter := utils.RandString() - certCenterAccount := test_dclauth.CreateAccount( + certCenterAccount := testDclauth.CreateAccount( suite, certCenter, dclauthtypes.AccountRoles{dclauthtypes.CertificationCenter}, @@ -1720,25 +1743,25 @@ func DemoTrackComplianceWithHexVidAndPid(suite *utils.TestSuite) { // Publish model info var pid int32 = 0xA11 - firstModel := test_model.NewMsgCreateModel(vid, pid, vendorAccount.Address) + firstModel := testModel.NewMsgCreateModel(vid, pid, vendorAccount.Address) _, err = suite.BuildAndBroadcastTx([]sdk.Msg{firstModel}, vendorName, vendorAccount) require.NoError(suite.T, err) // Publish modelVersion sv := tmrand.Uint32() svs := utils.RandString() - firstModelVersion := test_model.NewMsgCreateModelVersion(vid, pid, sv, svs, vendorAccount.Address) + firstModelVersion := 
testModel.NewMsgCreateModelVersion(vid, pid, sv, svs, vendorAccount.Address) _, err = suite.BuildAndBroadcastTx([]sdk.Msg{firstModelVersion}, vendorName, vendorAccount) require.NoError(suite.T, err) // Check if model either certified or revoked before Compliance record was created - _, err = GetComplianceInfoByHexVidAndPid(suite, testconstants.TestVID1String, testconstants.TestPID1String, sv, dclcompltypes.ZigbeeCertificationType) + _, err = GetComplianceInfoByHexVidAndPid(suite, testconstants.TestVID1String, testconstants.TestPID1String, sv, compliancetypes.ZigbeeCertificationType) suite.AssertNotFound(err) - _, err = GetRevokedModelByHexVidAndPid(suite, testconstants.TestVID1String, testconstants.TestPID1String, sv, dclcompltypes.ZigbeeCertificationType) + _, err = GetRevokedModelByHexVidAndPid(suite, testconstants.TestVID1String, testconstants.TestPID1String, sv, compliancetypes.ZigbeeCertificationType) suite.AssertNotFound(err) - _, err = GetCertifiedModelByHexVidAndPid(suite, testconstants.TestVID1String, testconstants.TestPID1String, sv, dclcompltypes.ZigbeeCertificationType) + _, err = GetCertifiedModelByHexVidAndPid(suite, testconstants.TestVID1String, testconstants.TestPID1String, sv, compliancetypes.ZigbeeCertificationType) suite.AssertNotFound(err) - _, err = GetProvisionalModelByHexVidAndPid(suite, testconstants.TestVID1String, testconstants.TestPID1String, sv, dclcompltypes.ZigbeeCertificationType) + _, err = GetProvisionalModelByHexVidAndPid(suite, testconstants.TestVID1String, testconstants.TestPID1String, sv, compliancetypes.ZigbeeCertificationType) suite.AssertNotFound(err) // Certify model @@ -1764,8 +1787,8 @@ func DemoTrackComplianceWithHexVidAndPid(suite *utils.TestSuite) { require.True(suite.T, compliancetypes.ErrAlreadyCertified.Is(err)) // Check model is certified - complianceInfo, _ := GetComplianceInfoByHexVidAndPid(suite, testconstants.TestVID1String, testconstants.TestPID1String, sv, dclcompltypes.ZigbeeCertificationType) - 
require.Equal(suite.T, dclcompltypes.ZigbeeCertificationType, complianceInfo.CertificationType) + complianceInfo, _ := GetComplianceInfoByHexVidAndPid(suite, testconstants.TestVID1String, testconstants.TestPID1String, sv, compliancetypes.ZigbeeCertificationType) + require.Equal(suite.T, compliancetypes.ZigbeeCertificationType, complianceInfo.CertificationType) require.Equal(suite.T, uint32(2), complianceInfo.SoftwareVersionCertificationStatus) require.Equal(suite.T, vid, complianceInfo.Vid) require.Equal(suite.T, pid, complianceInfo.Pid) @@ -1773,11 +1796,11 @@ func DemoTrackComplianceWithHexVidAndPid(suite *utils.TestSuite) { require.Equal(suite.T, testconstants.CDCertificateID, complianceInfo.CDCertificateId) require.Equal(suite.T, certReason, complianceInfo.Reason) require.Equal(suite.T, certDate, complianceInfo.Date) - modelIsCertified, _ := GetCertifiedModel(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + modelIsCertified, _ := GetCertifiedModel(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) require.True(suite.T, modelIsCertified.Value) - modelIsRevoked, _ := GetRevokedModel(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + modelIsRevoked, _ := GetRevokedModel(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) require.False(suite.T, modelIsRevoked.Value) - modelIsProvisional, _ := GetProvisionalModel(suite, vid, pid, sv, dclcompltypes.ZigbeeCertificationType) + modelIsProvisional, _ := GetProvisionalModel(suite, vid, pid, sv, compliancetypes.ZigbeeCertificationType) require.False(suite.T, modelIsProvisional.Value) } @@ -1786,19 +1809,23 @@ func DemoTrackRevocationWithHexVidAndPid(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := 
testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) jackName := testconstants.JackAccount jackKeyInfo, err := suite.Kr.Key(jackName) require.NoError(suite.T, err) - jackAccount, err := test_dclauth.GetAccount(suite, jackKeyInfo.GetAddress()) + address, err = jackKeyInfo.GetAddress() + require.NoError(suite.T, err) + jackAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // Register new Vendor account var vid int32 = 0xA14 vendorName := utils.RandString() - vendorAccount := test_dclauth.CreateVendorAccount( + vendorAccount := testDclauth.CreateVendorAccount( suite, vendorName, dclauthtypes.AccountRoles{dclauthtypes.Vendor}, @@ -1815,7 +1842,7 @@ func DemoTrackRevocationWithHexVidAndPid(suite *utils.TestSuite) { // Register new CertificationCenter account certCenter := utils.RandString() - certCenterAccount := test_dclauth.CreateAccount( + certCenterAccount := testDclauth.CreateAccount( suite, certCenter, dclauthtypes.AccountRoles{dclauthtypes.CertificationCenter}, @@ -1831,14 +1858,14 @@ func DemoTrackRevocationWithHexVidAndPid(suite *utils.TestSuite) { // Publish model info var pid int32 = 0xA15 - firstModel := test_model.NewMsgCreateModel(vid, pid, vendorAccount.Address) + firstModel := testModel.NewMsgCreateModel(vid, pid, vendorAccount.Address) _, err = suite.BuildAndBroadcastTx([]sdk.Msg{firstModel}, vendorName, vendorAccount) require.NoError(suite.T, err) // Publish modelVersion sv := tmrand.Uint32() svs := utils.RandString() - firstModelVersion := test_model.NewMsgCreateModelVersion(vid, pid, sv, svs, vendorAccount.Address) + firstModelVersion := testModel.NewMsgCreateModelVersion(vid, pid, sv, svs, vendorAccount.Address) _, err = suite.BuildAndBroadcastTx([]sdk.Msg{firstModelVersion}, vendorName, vendorAccount) require.NoError(suite.T, err) @@ -1865,19 +1892,19 @@ func DemoTrackRevocationWithHexVidAndPid(suite *utils.TestSuite) { require.True(suite.T, compliancetypes.ErrAlreadyRevoked.Is(err)) // Check 
non-certified model is revoked - complianceInfo, _ := GetComplianceInfoByHexVidAndPid(suite, testconstants.TestVID2String, testconstants.TestPID2String, sv, dclcompltypes.ZigbeeCertificationType) - require.Equal(suite.T, dclcompltypes.ZigbeeCertificationType, complianceInfo.CertificationType) + complianceInfo, _ := GetComplianceInfoByHexVidAndPid(suite, testconstants.TestVID2String, testconstants.TestPID2String, sv, compliancetypes.ZigbeeCertificationType) + require.Equal(suite.T, compliancetypes.ZigbeeCertificationType, complianceInfo.CertificationType) require.Equal(suite.T, uint32(3), complianceInfo.SoftwareVersionCertificationStatus) require.Equal(suite.T, vid, complianceInfo.Vid) require.Equal(suite.T, pid, complianceInfo.Pid) require.Equal(suite.T, sv, complianceInfo.SoftwareVersion) require.Equal(suite.T, revocReason, complianceInfo.Reason) require.Equal(suite.T, revocDate, complianceInfo.Date) - _, err = GetCertifiedModelByHexVidAndPid(suite, testconstants.TestVID2String, testconstants.TestPID2String, sv, dclcompltypes.ZigbeeCertificationType) + _, err = GetCertifiedModelByHexVidAndPid(suite, testconstants.TestVID2String, testconstants.TestPID2String, sv, compliancetypes.ZigbeeCertificationType) suite.AssertNotFound(err) - modelIsRevoked, _ := GetRevokedModelByHexVidAndPid(suite, testconstants.TestVID2String, testconstants.TestPID2String, sv, dclcompltypes.ZigbeeCertificationType) + modelIsRevoked, _ := GetRevokedModelByHexVidAndPid(suite, testconstants.TestVID2String, testconstants.TestPID2String, sv, compliancetypes.ZigbeeCertificationType) require.True(suite.T, modelIsRevoked.Value) - _, err = GetProvisionalModelByHexVidAndPid(suite, testconstants.TestVID2String, testconstants.TestPID2String, sv, dclcompltypes.ZigbeeCertificationType) + _, err = GetProvisionalModelByHexVidAndPid(suite, testconstants.TestVID2String, testconstants.TestPID2String, sv, compliancetypes.ZigbeeCertificationType) suite.AssertNotFound(err) // Certify model @@ -1899,8 +1926,8 @@ 
func DemoTrackRevocationWithHexVidAndPid(suite *utils.TestSuite) { require.NoError(suite.T, err) // Check model is certified - complianceInfo, _ = GetComplianceInfoByHexVidAndPid(suite, testconstants.TestVID2String, testconstants.TestPID2String, sv, dclcompltypes.ZigbeeCertificationType) - require.Equal(suite.T, dclcompltypes.ZigbeeCertificationType, complianceInfo.CertificationType) + complianceInfo, _ = GetComplianceInfoByHexVidAndPid(suite, testconstants.TestVID2String, testconstants.TestPID2String, sv, compliancetypes.ZigbeeCertificationType) + require.Equal(suite.T, compliancetypes.ZigbeeCertificationType, complianceInfo.CertificationType) require.Equal(suite.T, uint32(2), complianceInfo.SoftwareVersionCertificationStatus) require.Equal(suite.T, vid, complianceInfo.Vid) require.Equal(suite.T, pid, complianceInfo.Pid) @@ -1908,11 +1935,11 @@ func DemoTrackRevocationWithHexVidAndPid(suite *utils.TestSuite) { require.Equal(suite.T, testconstants.CDCertificateID, complianceInfo.CDCertificateId) require.Equal(suite.T, certReason, complianceInfo.Reason) require.Equal(suite.T, certDate, complianceInfo.Date) - certifiedModel, _ := GetCertifiedModelByHexVidAndPid(suite, testconstants.TestVID2String, testconstants.TestPID2String, sv, dclcompltypes.ZigbeeCertificationType) + certifiedModel, _ := GetCertifiedModelByHexVidAndPid(suite, testconstants.TestVID2String, testconstants.TestPID2String, sv, compliancetypes.ZigbeeCertificationType) require.True(suite.T, certifiedModel.Value) - revokedModel, _ := GetRevokedModelByHexVidAndPid(suite, testconstants.TestVID2String, testconstants.TestPID2String, sv, dclcompltypes.ZigbeeCertificationType) + revokedModel, _ := GetRevokedModelByHexVidAndPid(suite, testconstants.TestVID2String, testconstants.TestPID2String, sv, compliancetypes.ZigbeeCertificationType) require.False(suite.T, revokedModel.Value) - provisionalModel, _ := GetProvisionalModelByHexVidAndPid(suite, testconstants.TestVID2String, testconstants.TestPID2String, sv, 
dclcompltypes.ZigbeeCertificationType) + provisionalModel, _ := GetProvisionalModelByHexVidAndPid(suite, testconstants.TestVID2String, testconstants.TestPID2String, sv, compliancetypes.ZigbeeCertificationType) require.False(suite.T, provisionalModel.Value) } @@ -1921,19 +1948,23 @@ func DemoTrackProvisionByHexVidAndPid(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) jackName := testconstants.JackAccount jackKeyInfo, err := suite.Kr.Key(jackName) require.NoError(suite.T, err) - jackAccount, err := test_dclauth.GetAccount(suite, jackKeyInfo.GetAddress()) + address, err = jackKeyInfo.GetAddress() + require.NoError(suite.T, err) + jackAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // Register new Vendor account var vid int32 = 0xA16 vendorName := utils.RandString() - vendorAccount := test_dclauth.CreateVendorAccount( + vendorAccount := testDclauth.CreateVendorAccount( suite, vendorName, dclauthtypes.AccountRoles{dclauthtypes.Vendor}, @@ -1949,7 +1980,7 @@ func DemoTrackProvisionByHexVidAndPid(suite *utils.TestSuite) { // Register new CertificationCenter account certCenter := utils.RandString() - certCenterAccount := test_dclauth.CreateAccount( + certCenterAccount := testDclauth.CreateAccount( suite, certCenter, dclauthtypes.AccountRoles{dclauthtypes.CertificationCenter}, @@ -1968,12 +1999,12 @@ func DemoTrackProvisionByHexVidAndPid(suite *utils.TestSuite) { svs := utils.RandString() // Publish model info - firstModel := test_model.NewMsgCreateModel(vid, pid, vendorAccount.Address) + firstModel := testModel.NewMsgCreateModel(vid, pid, vendorAccount.Address) _, err = 
suite.BuildAndBroadcastTx([]sdk.Msg{firstModel}, vendorName, vendorAccount) require.NoError(suite.T, err) // Publish modelVersion - firstModelVersion := test_model.NewMsgCreateModelVersion(vid, pid, sv, svs, vendorAccount.Address) + firstModelVersion := testModel.NewMsgCreateModelVersion(vid, pid, sv, svs, vendorAccount.Address) _, err = suite.BuildAndBroadcastTx([]sdk.Msg{firstModelVersion}, vendorName, vendorAccount) require.NoError(suite.T, err) @@ -2000,8 +2031,8 @@ func DemoTrackProvisionByHexVidAndPid(suite *utils.TestSuite) { require.True(suite.T, compliancetypes.ErrAlreadyProvisional.Is(err)) // Check non-existent model is provisioned - complianceInfo, _ := GetComplianceInfoByHexVidAndPid(suite, testconstants.TestVID3String, testconstants.TestPID3String, sv, dclcompltypes.MatterCertificationType) - require.Equal(suite.T, dclcompltypes.MatterCertificationType, complianceInfo.CertificationType) + complianceInfo, _ := GetComplianceInfoByHexVidAndPid(suite, testconstants.TestVID3String, testconstants.TestPID3String, sv, compliancetypes.MatterCertificationType) + require.Equal(suite.T, compliancetypes.MatterCertificationType, complianceInfo.CertificationType) require.Equal(suite.T, uint32(1), complianceInfo.SoftwareVersionCertificationStatus) require.Equal(suite.T, vid, complianceInfo.Vid) require.Equal(suite.T, pid, complianceInfo.Pid) @@ -2009,11 +2040,11 @@ func DemoTrackProvisionByHexVidAndPid(suite *utils.TestSuite) { require.Equal(suite.T, testconstants.CDCertificateID, complianceInfo.CDCertificateId) require.Equal(suite.T, provReason, complianceInfo.Reason) require.Equal(suite.T, provDate, complianceInfo.Date) - _, err = GetCertifiedModelByHexVidAndPid(suite, testconstants.TestVID3String, testconstants.TestPID3String, sv, dclcompltypes.MatterCertificationType) + _, err = GetCertifiedModelByHexVidAndPid(suite, testconstants.TestVID3String, testconstants.TestPID3String, sv, compliancetypes.MatterCertificationType) suite.AssertNotFound(err) - _, err = 
GetRevokedModelByHexVidAndPid(suite, testconstants.TestVID3String, testconstants.TestPID3String, sv, dclcompltypes.MatterCertificationType) + _, err = GetRevokedModelByHexVidAndPid(suite, testconstants.TestVID3String, testconstants.TestPID3String, sv, compliancetypes.MatterCertificationType) suite.AssertNotFound(err) - provisionModel, _ := GetProvisionalModelByHexVidAndPid(suite, testconstants.TestVID3String, testconstants.TestPID3String, sv, dclcompltypes.MatterCertificationType) + provisionModel, _ := GetProvisionalModelByHexVidAndPid(suite, testconstants.TestVID3String, testconstants.TestPID3String, sv, compliancetypes.MatterCertificationType) require.True(suite.T, provisionModel.Value) // Certify model @@ -2034,8 +2065,8 @@ func DemoTrackProvisionByHexVidAndPid(suite *utils.TestSuite) { require.NoError(suite.T, err) // Check model is certified - complianceInfo, _ = GetComplianceInfoByHexVidAndPid(suite, testconstants.TestVID3String, testconstants.TestPID3String, sv, dclcompltypes.MatterCertificationType) - require.Equal(suite.T, dclcompltypes.MatterCertificationType, complianceInfo.CertificationType) + complianceInfo, _ = GetComplianceInfoByHexVidAndPid(suite, testconstants.TestVID3String, testconstants.TestPID3String, sv, compliancetypes.MatterCertificationType) + require.Equal(suite.T, compliancetypes.MatterCertificationType, complianceInfo.CertificationType) require.Equal(suite.T, uint32(2), complianceInfo.SoftwareVersionCertificationStatus) require.Equal(suite.T, vid, complianceInfo.Vid) require.Equal(suite.T, pid, complianceInfo.Pid) @@ -2043,11 +2074,11 @@ func DemoTrackProvisionByHexVidAndPid(suite *utils.TestSuite) { require.Equal(suite.T, testconstants.CDCertificateID, complianceInfo.CDCertificateId) require.Equal(suite.T, certReason, complianceInfo.Reason) require.Equal(suite.T, certDate, complianceInfo.Date) - certifiedModel, _ := GetCertifiedModelByHexVidAndPid(suite, testconstants.TestVID3String, testconstants.TestPID3String, sv, 
dclcompltypes.MatterCertificationType) + certifiedModel, _ := GetCertifiedModelByHexVidAndPid(suite, testconstants.TestVID3String, testconstants.TestPID3String, sv, compliancetypes.MatterCertificationType) require.True(suite.T, certifiedModel.Value) - revokedModel, _ := GetRevokedModelByHexVidAndPid(suite, testconstants.TestVID3String, testconstants.TestPID3String, sv, dclcompltypes.MatterCertificationType) + revokedModel, _ := GetRevokedModelByHexVidAndPid(suite, testconstants.TestVID3String, testconstants.TestPID3String, sv, compliancetypes.MatterCertificationType) require.False(suite.T, revokedModel.Value) - provisionalModel, _ := GetProvisionalModelByHexVidAndPid(suite, testconstants.TestVID3String, testconstants.TestPID3String, sv, dclcompltypes.MatterCertificationType) + provisionalModel, _ := GetProvisionalModelByHexVidAndPid(suite, testconstants.TestVID3String, testconstants.TestPID3String, sv, compliancetypes.MatterCertificationType) require.False(suite.T, provisionalModel.Value) // Can not provision certified model diff --git a/integration_tests/grpc_rest/dclauth/helpers.go b/integration_tests/grpc_rest/dclauth/helpers.go index e8ee7b0c3..1b448060e 100644 --- a/integration_tests/grpc_rest/dclauth/helpers.go +++ b/integration_tests/grpc_rest/dclauth/helpers.go @@ -394,7 +394,7 @@ func RejectAddAccount( return suite.BuildAndBroadcastTx([]sdk.Msg{msg}, signerName, signerAccount) } -func CreateAccountInfo(suite *utils.TestSuite, accountName string) keyring.Info { +func CreateAccountInfo(suite *utils.TestSuite, accountName string) keyring.Record { entropySeed, err := bip39.NewEntropy(256) require.NoError(suite.T, err) @@ -404,7 +404,7 @@ func CreateAccountInfo(suite *utils.TestSuite, accountName string) keyring.Info accountInfo, err := suite.Kr.NewAccount(accountName, mnemonic, testconstants.Passphrase, sdk.FullFundraiserPath, hd.Secp256k1) require.NoError(suite.T, err) - return accountInfo + return *accountInfo } func CreateAccount( @@ -420,11 +420,15 @@ func 
CreateAccount( info string, ) *dclauthtypes.Account { accountInfo := CreateAccountInfo(suite, accountName) + address, err := accountInfo.GetAddress() + require.NoError(suite.T, err) + pubKey, err := accountInfo.GetPubKey() + require.NoError(suite.T, err) - _, err := ProposeAddAccount( + _, err = ProposeAddAccount( suite, - accountInfo.GetAddress(), - accountInfo.GetPubKey(), + address, + pubKey, roles, vendorID, productIDs, @@ -434,16 +438,21 @@ func CreateAccount( ) require.NoError(suite.T, err) + address, err = accountInfo.GetAddress() + require.NoError(suite.T, err) _, err = ApproveAddAccount( suite, - accountInfo.GetAddress(), + address, approverName, approverAccount, info, ) require.NoError(suite.T, err) - account, err := GetAccount(suite, accountInfo.GetAddress()) + address, err = accountInfo.GetAddress() + require.NoError(suite.T, err) + + account, err := GetAccount(suite, address) require.NoError(suite.T, err) return account @@ -457,16 +466,20 @@ func CreateVendorAccount( productIDs []*types.Uint16Range, proposerName string, proposerAccount *dclauthtypes.Account, - approverName string, - approverAccount *dclauthtypes.Account, + _ string, + _ *dclauthtypes.Account, info string, ) *dclauthtypes.Account { accountInfo := CreateAccountInfo(suite, accountName) + address, err := accountInfo.GetAddress() + require.NoError(suite.T, err) + pubKey, err := accountInfo.GetPubKey() + require.NoError(suite.T, err) - _, err := ProposeAddAccount( + _, err = ProposeAddAccount( suite, - accountInfo.GetAddress(), - accountInfo.GetPubKey(), + address, + pubKey, roles, vendorID, productIDs, @@ -476,7 +489,10 @@ func CreateVendorAccount( ) require.NoError(suite.T, err) - account, err := GetAccount(suite, accountInfo.GetAddress()) + address, err = accountInfo.GetAddress() + require.NoError(suite.T, err) + + account, err := GetAccount(suite, address) require.NoError(suite.T, err) return account @@ -512,19 +528,25 @@ func AuthDemo(suite *utils.TestSuite) { jackName := 
testconstants.JackAccount jackKeyInfo, err := suite.Kr.Key(jackName) require.NoError(suite.T, err) - jackAccount, err := GetAccount(suite, jackKeyInfo.GetAddress()) + address, err := jackKeyInfo.GetAddress() + require.NoError(suite.T, err) + jackAccount, err := GetAccount(suite, address) require.NoError(suite.T, err) aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err = aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := GetAccount(suite, address) require.NoError(suite.T, err) // Query all active accounts @@ -548,8 +570,10 @@ func AuthDemo(suite *utils.TestSuite) { accountName := utils.RandString() accountInfo := CreateAccountInfo(suite, accountName) - testAccPubKey := accountInfo.GetPubKey() - testAccAddr := accountInfo.GetAddress() + testAccPubKey, err := accountInfo.GetPubKey() + require.NoError(suite.T, err) + testAccAddr, err := accountInfo.GetAddress() + require.NoError(suite.T, err) // Query unknown account _, err = GetAccount(suite, testAccAddr) @@ -947,8 +971,10 @@ func AuthDemo(suite *utils.TestSuite) { accountName = utils.RandString() accountInfo = CreateAccountInfo(suite, accountName) - testAccPubKey = accountInfo.GetPubKey() - testAccAddr = accountInfo.GetAddress() + testAccPubKey, err = accountInfo.GetPubKey() + require.NoError(suite.T, err) + testAccAddr, err = accountInfo.GetAddress() + require.NoError(suite.T, err) // Query unknown account _, err = GetAccount(suite, testAccAddr) @@ -1022,8 +1048,10 @@ func AuthDemo(suite *utils.TestSuite) { accountName = 
utils.RandString() accountInfo = CreateAccountInfo(suite, accountName) - testAccPubKey = accountInfo.GetPubKey() - testAccAddr = accountInfo.GetAddress() + testAccPubKey, err = accountInfo.GetPubKey() + require.NoError(suite.T, err) + testAccAddr, err = accountInfo.GetAddress() + require.NoError(suite.T, err) // Query unknown account _, err = GetAccount(suite, testAccAddr) @@ -1075,8 +1103,10 @@ func AuthDemo(suite *utils.TestSuite) { accountName = utils.RandString() accountInfo = CreateAccountInfo(suite, accountName) - testAccPubKey = accountInfo.GetPubKey() - testAccAddr = accountInfo.GetAddress() + testAccPubKey, err = accountInfo.GetPubKey() + require.NoError(suite.T, err) + testAccAddr, err = accountInfo.GetAddress() + require.NoError(suite.T, err) // Jack proposes new account _, err = ProposeAddAccount( @@ -1099,8 +1129,10 @@ func AuthDemo(suite *utils.TestSuite) { accountName = utils.RandString() accountInfo = CreateAccountInfo(suite, accountName) - testAccPubKey = accountInfo.GetPubKey() - testAccAddr = accountInfo.GetAddress() + testAccPubKey, err = accountInfo.GetPubKey() + require.NoError(suite.T, err) + testAccAddr, err = accountInfo.GetAddress() + require.NoError(suite.T, err) // Jack proposes new account _, err = ProposeAddAccount( @@ -1135,8 +1167,10 @@ func AuthDemo(suite *utils.TestSuite) { accountName = utils.RandString() accountInfo = CreateAccountInfo(suite, accountName) - testAccPubKey = accountInfo.GetPubKey() - testAccAddr = accountInfo.GetAddress() + testAccPubKey, err = accountInfo.GetPubKey() + require.NoError(suite.T, err) + testAccAddr, err = accountInfo.GetAddress() + require.NoError(suite.T, err) // Query unknown account _, err = GetAccount(suite, testAccAddr) diff --git a/integration_tests/grpc_rest/dclupgrade/helpers.go b/integration_tests/grpc_rest/dclupgrade/helpers.go index c94d7884a..bf35bce25 100644 --- a/integration_tests/grpc_rest/dclupgrade/helpers.go +++ b/integration_tests/grpc_rest/dclupgrade/helpers.go @@ -4,11 +4,11 @@ 
import ( "context" "fmt" + tmrand "github.com/cometbft/cometbft/libs/rand" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" test_dclauth "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/grpc_rest/dclauth" "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/utils" @@ -254,19 +254,25 @@ func Demo(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := test_dclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) jackName := testconstants.JackAccount jackKeyInfo, err := suite.Kr.Key(jackName) require.NoError(suite.T, err) - jackAccount, err := test_dclauth.GetAccount(suite, jackKeyInfo.GetAddress()) + address, err = jackKeyInfo.GetAddress() + require.NoError(suite.T, err) + jackAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) // trustee proposes upgrade @@ -426,13 +432,17 @@ func ProposeUpgradeByNonTrustee(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := 
test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := test_dclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) // register new account without Trustee role @@ -466,13 +476,17 @@ func ApproveUpgradeByNonTrustee(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := test_dclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) // propose upgrade @@ -511,13 +525,17 @@ func ProposeUpgradeTwice(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount - bobKyInfo, err := suite.Kr.Key(bobName) + bobKeyInfo, err := suite.Kr.Key(bobName) 
require.NoError(suite.T, err) - bobAccount, err := test_dclauth.GetAccount(suite, bobKyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) planName, height, info := utils.RandString(), int64(100000), utils.RandString() @@ -544,7 +562,9 @@ func ProposeAndRejectUpgrade(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) planName, height, info := utils.RandString(), int64(100000), utils.RandString() diff --git a/integration_tests/grpc_rest/model/grpc_test.go b/integration_tests/grpc_rest/model/grpc_test.go index b18b196c7..30af60fd6 100644 --- a/integration_tests/grpc_rest/model/grpc_test.go +++ b/integration_tests/grpc_rest/model/grpc_test.go @@ -89,25 +89,25 @@ func TestDeleteModelVersionCertifiedGRPC(t *testing.T) { } func TestAddModelByVendorProductIdsGRPC(t *testing.T) { suite := utils.SetupTest(t, testconstants.ChainID, false) - model.AddModelByVendorWithProductIds(&suite) + model.AddModelByVendorWithProductIDs(&suite) } func TestUpdateByVendorWithProductIdsGRPC(t *testing.T) { suite := utils.SetupTest(t, testconstants.ChainID, false) - model.UpdateByVendorWithProductIds(&suite) + model.UpdateByVendorWithProductIDs(&suite) } func TestAddModelByVendorWithNonAssociatedProductIdsGRPC(t *testing.T) { suite := utils.SetupTest(t, testconstants.ChainID, false) - model.AddModelByVendorWithNonAssociatedProductIds(&suite) + model.AddModelByVendorWithNonAssociatedProductIDs(&suite) } func TestUpdateModelByVendorWithNonAssociatedProductIdsGRPC(t *testing.T) { suite := utils.SetupTest(t, testconstants.ChainID, 
false) - model.UpdateModelByVendorWithNonAssociatedProductIds(&suite) + model.UpdateModelByVendorWithNonAssociatedProductIDs(&suite) } func TestDeleteModelByVendorWithNonAssociatedProductIdsGRPC(t *testing.T) { suite := utils.SetupTest(t, testconstants.ChainID, false) - model.DeleteModelByVendorWithNonAssociatedProductIds(&suite) + model.DeleteModelByVendorWithNonAssociatedProductIDs(&suite) } diff --git a/integration_tests/grpc_rest/model/helpers.go b/integration_tests/grpc_rest/model/helpers.go index 074eac00c..2784fe60f 100644 --- a/integration_tests/grpc_rest/model/helpers.go +++ b/integration_tests/grpc_rest/model/helpers.go @@ -18,10 +18,10 @@ import ( "context" "fmt" + tmrand "github.com/cometbft/cometbft/libs/rand" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" testDclauth "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/grpc_rest/dclauth" "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/utils" @@ -364,18 +364,22 @@ func GetVendorModelsByHexVid( return &res, nil } -func AddModelByVendorWithProductIds(suite *utils.TestSuite) { +func AddModelByVendorWithProductIDs(suite *utils.TestSuite) { // Alice and Bob are predefined Trustees aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := testDclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := testDclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + 
address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // register new account with Vendor role @@ -400,18 +404,22 @@ func AddModelByVendorWithProductIds(suite *utils.TestSuite) { require.NoError(suite.T, err) } -func UpdateByVendorWithProductIds(suite *utils.TestSuite) { +func UpdateByVendorWithProductIDs(suite *utils.TestSuite) { // Alice and Bob are predefined Trustees aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := testDclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := testDclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // register new account with Vendor role @@ -448,13 +456,17 @@ func DeleteModelWithAssociatedModelVersions(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := testDclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := testDclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := testDclauth.GetAccount(suite, 
address) require.NoError(suite.T, err) // Register new Vendor account @@ -513,13 +525,17 @@ func DeleteModelWithAssociatedModelVersionsCertified(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := testDclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := testDclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // Register new Vendor account @@ -600,13 +616,17 @@ func DeleteModelVersion(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := testDclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := testDclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // Register new Vendor account @@ -651,13 +671,17 @@ func DeleteModelVersionDifferentVid(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := testDclauth.GetAccount(suite, 
aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := testDclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // Register new Vendor account @@ -721,13 +745,17 @@ func DeleteModelVersionDoesNotExist(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := testDclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := testDclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // Register new Vendor account @@ -758,13 +786,17 @@ func DeleteModelVersionNotByCreator(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := testDclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := 
testDclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // Register new Vendor account @@ -828,13 +860,17 @@ func DeleteModelVersionCertified(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := testDclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := testDclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // Register new Vendor account @@ -914,13 +950,17 @@ func Demo(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := testDclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := testDclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // Register new Vendor account @@ -1021,13 +1061,17 @@ func AddModelByNonVendor(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, 
err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := testDclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := testDclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // register new account without Vendor role @@ -1056,18 +1100,22 @@ func AddModelByNonVendor(suite *utils.TestSuite) { require.True(suite.T, sdkerrors.ErrUnauthorized.Is(err)) } -func AddModelByVendorWithNonAssociatedProductIds(suite *utils.TestSuite) { +func AddModelByVendorWithNonAssociatedProductIDs(suite *utils.TestSuite) { // Alice and Bob are predefined Trustees aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := testDclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := testDclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // register new account with Vendor role @@ -1094,18 +1142,22 @@ func AddModelByVendorWithNonAssociatedProductIds(suite *utils.TestSuite) { require.True(suite.T, sdkerrors.ErrUnauthorized.Is(err)) } -func UpdateModelByVendorWithNonAssociatedProductIds(suite 
*utils.TestSuite) { +func UpdateModelByVendorWithNonAssociatedProductIDs(suite *utils.TestSuite) { // Alice and Bob are predefined Trustees aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := testDclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := testDclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // register new account with Vendor role @@ -1147,18 +1199,22 @@ func UpdateModelByVendorWithNonAssociatedProductIds(suite *utils.TestSuite) { require.True(suite.T, sdkerrors.ErrUnauthorized.Is(err)) } -func DeleteModelByVendorWithNonAssociatedProductIds(suite *utils.TestSuite) { +func DeleteModelByVendorWithNonAssociatedProductIDs(suite *utils.TestSuite) { // Alice and Bob are predefined Trustees aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := testDclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := testDclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // register new account with Vendor role 
@@ -1205,13 +1261,17 @@ func AddModelByDifferentVendor(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := testDclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := testDclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // register new Vendor account @@ -1243,13 +1303,17 @@ func AddModelTwice(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := testDclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := testDclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // register new Vendor account @@ -1315,13 +1379,17 @@ func DemoWithHexVidAndPid(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := testDclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := 
testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := testDclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := testDclauth.GetAccount(suite, address) require.NoError(suite.T, err) // Register new Vendor account diff --git a/integration_tests/grpc_rest/model/rest_test.go b/integration_tests/grpc_rest/model/rest_test.go index b28b9e963..92a0e3a4c 100644 --- a/integration_tests/grpc_rest/model/rest_test.go +++ b/integration_tests/grpc_rest/model/rest_test.go @@ -100,25 +100,25 @@ func TestDeleteModelVersionCertifiedREST(t *testing.T) { func TestAddModelByVendorProductIdsREST(t *testing.T) { suite := utils.SetupTest(t, testconstants.ChainID, false) - model.AddModelByVendorWithProductIds(&suite) + model.AddModelByVendorWithProductIDs(&suite) } func TestUpdateByVendorWithProductIdsREST(t *testing.T) { suite := utils.SetupTest(t, testconstants.ChainID, false) - model.UpdateByVendorWithProductIds(&suite) + model.UpdateByVendorWithProductIDs(&suite) } func TestAddModelByVendorWithNonAssociatedProductIdsREST(t *testing.T) { suite := utils.SetupTest(t, testconstants.ChainID, false) - model.AddModelByVendorWithNonAssociatedProductIds(&suite) + model.AddModelByVendorWithNonAssociatedProductIDs(&suite) } func TestUpdateModelByVendorWithNonAssociatedProductIdsREST(t *testing.T) { suite := utils.SetupTest(t, testconstants.ChainID, false) - model.UpdateModelByVendorWithNonAssociatedProductIds(&suite) + model.UpdateModelByVendorWithNonAssociatedProductIDs(&suite) } func TestDeleteModelByVendorWithNonAssociatedProductIdsREST(t *testing.T) { suite := utils.SetupTest(t, testconstants.ChainID, false) - model.DeleteModelByVendorWithNonAssociatedProductIds(&suite) + model.DeleteModelByVendorWithNonAssociatedProductIDs(&suite) } diff --git 
a/integration_tests/grpc_rest/pki/helpers.go b/integration_tests/grpc_rest/pki/helpers.go index dc5c07cea..17c18cc10 100644 --- a/integration_tests/grpc_rest/pki/helpers.go +++ b/integration_tests/grpc_rest/pki/helpers.go @@ -639,13 +639,17 @@ func Demo(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) jackName := testconstants.JackAccount jackKeyInfo, err := suite.Kr.Key(jackName) require.NoError(suite.T, err) - jackAccount, err := test_dclauth.GetAccount(suite, jackKeyInfo.GetAddress()) + address, err = jackKeyInfo.GetAddress() + require.NoError(suite.T, err) + jackAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) // Register new Vendor account diff --git a/integration_tests/grpc_rest/pki/noc_cert_helper.go b/integration_tests/grpc_rest/pki/noc_cert_helpers.go similarity index 98% rename from integration_tests/grpc_rest/pki/noc_cert_helper.go rename to integration_tests/grpc_rest/pki/noc_cert_helpers.go index c4c64ee9f..37ae26615 100644 --- a/integration_tests/grpc_rest/pki/noc_cert_helper.go +++ b/integration_tests/grpc_rest/pki/noc_cert_helpers.go @@ -5,9 +5,9 @@ import ( "fmt" "net/url" + tmrand "github.com/cometbft/cometbft/libs/rand" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" test_dclauth "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/grpc_rest/dclauth" "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/utils" @@ -185,13 +185,17 @@ func 
NocCertDemo(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) jackName := testconstants.JackAccount jackKeyInfo, err := suite.Kr.Key(jackName) require.NoError(suite.T, err) - jackAccount, err := test_dclauth.GetAccount(suite, jackKeyInfo.GetAddress()) + address, err = jackKeyInfo.GetAddress() + require.NoError(suite.T, err) + jackAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) // Register first Vendor account diff --git a/integration_tests/grpc_rest/validator/helpers.go b/integration_tests/grpc_rest/validator/helpers.go index fff674b78..184c7c5de 100644 --- a/integration_tests/grpc_rest/validator/helpers.go +++ b/integration_tests/grpc_rest/validator/helpers.go @@ -4,10 +4,10 @@ import ( "context" "fmt" + tmrand "github.com/cometbft/cometbft/libs/rand" cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" test_dclauth "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/grpc_rest/dclauth" "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/utils" @@ -275,19 +275,25 @@ func Demo(suite *utils.TestSuite) { jackName := testconstants.JackAccount jackKeyInfo, err := suite.Kr.Key(jackName) require.NoError(suite.T, err) - jackAccount, err := test_dclauth.GetAccount(suite, jackKeyInfo.GetAddress()) + address, err := jackKeyInfo.GetAddress() + require.NoError(suite.T, err) + jackAccount, err := test_dclauth.GetAccount(suite, 
address) require.NoError(suite.T, err) aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err = aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := test_dclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) // Register new Vendor account diff --git a/integration_tests/grpc_rest/vendorinfo/helpers.go b/integration_tests/grpc_rest/vendorinfo/helpers.go index 753de4a10..ad6ee42f5 100644 --- a/integration_tests/grpc_rest/vendorinfo/helpers.go +++ b/integration_tests/grpc_rest/vendorinfo/helpers.go @@ -4,10 +4,10 @@ import ( "context" "fmt" + tmrand "github.com/cometbft/cometbft/libs/rand" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" test_dclauth "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/grpc_rest/dclauth" "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/utils" @@ -130,13 +130,17 @@ func Demo(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := test_dclauth.GetAccount(suite, 
address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := test_dclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) // Register new Vendor account @@ -182,13 +186,17 @@ func AddVendorInfoByNonVendor(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := test_dclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) // register new account without Vendor role @@ -221,13 +229,17 @@ func AddVendorInfoByDifferentVendor(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := test_dclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := 
test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) // register new Vendor account @@ -258,13 +270,17 @@ func AddVendorInfoByNonVendorAdmin(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := test_dclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) // register new account without VendorAdmin role @@ -297,13 +313,17 @@ func AddVendorInfoByVendorAdmin(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := test_dclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) // register new account without VendorAdmin role @@ -335,13 +355,17 @@ func UpdateVendorInfoByVendorAdmin(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - 
aliceAccount, err := test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := test_dclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) // register new account without VendorAdmin role @@ -385,13 +409,17 @@ func AddVendorInfoTwice(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, err) - bobAccount, err := test_dclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) // register new Vendor account @@ -441,13 +469,17 @@ func DemoWithHexVid(suite *utils.TestSuite) { aliceName := testconstants.AliceAccount aliceKeyInfo, err := suite.Kr.Key(aliceName) require.NoError(suite.T, err) - aliceAccount, err := test_dclauth.GetAccount(suite, aliceKeyInfo.GetAddress()) + address, err := aliceKeyInfo.GetAddress() + require.NoError(suite.T, err) + aliceAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) bobName := testconstants.BobAccount bobKeyInfo, err := suite.Kr.Key(bobName) require.NoError(suite.T, 
err) - bobAccount, err := test_dclauth.GetAccount(suite, bobKeyInfo.GetAddress()) + address, err = bobKeyInfo.GetAddress() + require.NoError(suite.T, err) + bobAccount, err := test_dclauth.GetAccount(suite, address) require.NoError(suite.T, err) // Register new Vendor account diff --git a/integration_tests/light_client_proxy/auth.sh b/integration_tests/light_client_proxy/auth.sh old mode 100644 new mode 100755 index 6e4fa8c01..e591dcf49 --- a/integration_tests/light_client_proxy/auth.sh +++ b/integration_tests/light_client_proxy/auth.sh @@ -82,12 +82,14 @@ vid=$RANDOM echo "Jack proposes account for $user" result=$(echo $passphrase | dcld tx auth propose-add-account --address="$user_address" --pubkey="$user_pubkey" --roles="NodeAdmin" --from jack --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Alice approves account for \"$user\"" result=$(echo $passphrase | dcld tx auth approve-add-account --address="$user_address" --from alice --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider diff --git a/integration_tests/light_client_proxy/compliance.sh b/integration_tests/light_client_proxy/compliance.sh old mode 100644 new mode 100755 index 0b4ed678a..8c3cdcd36 --- a/integration_tests/light_client_proxy/compliance.sh +++ b/integration_tests/light_client_proxy/compliance.sh @@ -114,6 +114,8 @@ test_divider echo "Certify Model with VID: $vid PID: $pid SV: ${sv} with zigbee certification" result=$(echo "$passphrase" | dcld tx compliance certify-model --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=$svs --certificationType="zigbee" --certificationDate="$certification_date" --cdCertificateId="$cd_certificate_id" --cdVersionNumber=1 --from $zb_account --yes) +result=$(get_txn_result "$result") + echo "$result" check_response "$result" "\"code\": 0" diff --git a/integration_tests/light_client_proxy/model.sh b/integration_tests/light_client_proxy/model.sh old mode 
100644 new mode 100755 index d8c35f850..16583065b --- a/integration_tests/light_client_proxy/model.sh +++ b/integration_tests/light_client_proxy/model.sh @@ -77,6 +77,7 @@ test_divider productLabel="Device #1" echo "Add Model with VID: $vid PID: $pid" result=$(echo "test1234" | dcld tx model add-model --vid=$vid --pid=$pid --deviceTypeID=1 --productName=TestProduct --productLabel="$productLabel" --partNumber=1 --commissioningCustomFlow=0 --from=$vendor_account --yes) +result=$(get_txn_result "$result") echo "$result" check_response "$result" "\"code\": 0" @@ -84,6 +85,7 @@ test_divider echo "Create a Device Model Version with VID: $vid PID: $pid SV: $sv" result=$(echo 'test1234' | dcld tx model add-model-version --cdVersionNumber=1 --maxApplicableSoftwareVersion=10 --minApplicableSoftwareVersion=1 --vid=$vid --pid=$pid --softwareVersion=$sv --softwareVersionString=1 --from=$vendor_account --yes) +result=$(get_txn_result "$result") echo "$result" check_response "$result" "\"code\": 0" diff --git a/integration_tests/light_client_proxy/pki.sh b/integration_tests/light_client_proxy/pki.sh index cb6f10156..d6f2fb9fd 100755 --- a/integration_tests/light_client_proxy/pki.sh +++ b/integration_tests/light_client_proxy/pki.sh @@ -140,6 +140,7 @@ test_divider echo "$vendor_account (Not Trustee) propose Root certificate" root_path="integration_tests/constants/root_cert" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$root_path" --vid $vid --from $vendor_account --yes) +result=$(get_txn_result "$result") response_does_not_contain "$result" "\"code\": 0" echo "$result" @@ -148,6 +149,7 @@ test_divider echo "$trustee_account (Trustee) propose Root certificate" root_path="integration_tests/constants/root_cert" result=$(echo "$passphrase" | dcld tx pki propose-add-x509-root-cert --certificate="$root_path" --vid $vid --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -155,6 
+157,7 @@ test_divider echo "$second_trustee_account (Trustee) approve Root certificate" result=$(echo "$passphrase" | dcld tx pki approve-add-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --from $second_trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -163,6 +166,7 @@ test_divider echo "$vendor_account (Vendor) add Intermediate certificate" intermediate_path="integration_tests/constants/intermediate_cert" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$intermediate_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -171,6 +175,7 @@ test_divider echo "$vendor_account (Vendor) add Leaf certificate" leaf_path="integration_tests/constants/leaf_cert" result=$(echo "$passphrase" | dcld tx pki add-x509-cert --certificate="$leaf_path" --from $vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -178,6 +183,7 @@ test_divider echo "$vendor_account (Vendor) revokes Leaf certificate." 
result=$(echo "$passphrase" | dcld tx pki revoke-x509-cert --subject="$leaf_cert_subject" --subject-key-id="$leaf_cert_subject_key_id" --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" @@ -185,6 +191,7 @@ test_divider echo "$trustee_account (Trustee) proposes to revoke Root certificate" result=$(echo "$passphrase" | dcld tx pki propose-revoke-x509-root-cert --subject="$root_cert_subject" --subject-key-id="$root_cert_subject_key_id" --from $trustee_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" diff --git a/integration_tests/light_client_proxy/vendorinfo.sh b/integration_tests/light_client_proxy/vendorinfo.sh old mode 100644 new mode 100755 index e29bf1979..e54a1a877 --- a/integration_tests/light_client_proxy/vendorinfo.sh +++ b/integration_tests/light_client_proxy/vendorinfo.sh @@ -53,6 +53,7 @@ echo "Create VendorInfo Record for VID: $vid" companyLegalName="XYZ IOT Devices Inc" vendorName="XYZ Devices" result=$(echo "test1234" | dcld tx vendorinfo add-vendor --vid=$vid --companyLegalName="$companyLegalName" --vendorName="$vendorName" --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "$result" diff --git a/integration_tests/run-all.sh b/integration_tests/run-all.sh index 0c6669254..2535e71f6 100755 --- a/integration_tests/run-all.sh +++ b/integration_tests/run-all.sh @@ -82,6 +82,7 @@ init_pool() { make localnet_start &>${DETAILED_OUTPUT_TARGET} log "-> Waiting for the second block (needed to request proofs)" >${DETAILED_OUTPUT_TARGET} + execute_with_retry "dcld status" "connection" wait_for_height 2 20 } @@ -110,6 +111,40 @@ make image &>${DETAILED_OUTPUT_TARGET} cleanup_pool +# Upgrade procedure tests +if [[ $TESTS_TO_RUN =~ "all" || $TESTS_TO_RUN =~ "upgrade" ]]; then + UPGRADE_SHELL_TEST="./integration_tests/upgrade/test-upgrade.sh" + + init_pool yes 
localnet_init_latest_stable_release "v0.12.0" + + log "*****************************************************************************************" + log "Running $UPGRADE_SHELL_TEST" + log "*****************************************************************************************" + + if bash "$UPGRADE_SHELL_TEST" &>${DETAILED_OUTPUT_TARGET}; then + rm dcld_mainnet_stable + log "$UPGRADE_SHELL_TEST finished successfully" + source integration_tests/upgrade/add-new-node-after-upgrade.sh + check_adding_new_node + else + log "$UPGRADE_SHELL_TEST failed" + exit 1 + fi + + cleanup_pool +fi + +# Deploy tests +if [[ $TESTS_TO_RUN =~ "all" || $TESTS_TO_RUN =~ "deploy" ]]; then + DEPLOY_SHELL_TEST="./integration_tests/deploy/test_deploy.sh" + if bash "$DEPLOY_SHELL_TEST" &>${DETAILED_OUTPUT_TARGET}; then + log "$DEPLOY_SHELL_TEST finished successfully" + else + log "$DEPLOY_SHELL_TEST failed" + exit 1 + fi +fi + # Cli shell tests if [[ $TESTS_TO_RUN =~ "all" || $TESTS_TO_RUN =~ "cli" ]]; then CLI_SHELL_TESTS=$(find integration_tests/cli -type f -name '*.sh' -not -name "common.sh") @@ -179,37 +214,3 @@ if [[ $TESTS_TO_RUN =~ "all" || $TESTS_TO_RUN =~ "rest" ]]; then cleanup_pool done fi - -# Deploy tests -if [[ $TESTS_TO_RUN =~ "all" || $TESTS_TO_RUN =~ "deploy" ]]; then - DEPLOY_SHELL_TEST="./integration_tests/deploy/test_deploy.sh" - if bash "$DEPLOY_SHELL_TEST" &>${DETAILED_OUTPUT_TARGET}; then - log "$DEPLOY_SHELL_TEST finished successfully" - else - log "$DEPLOY_SHELL_TEST failed" - exit 1 - fi -fi - -# Upgrade procedure tests -if [[ $TESTS_TO_RUN =~ "all" || $TESTS_TO_RUN =~ "upgrade" ]]; then - UPGRADE_SHELL_TEST="./integration_tests/upgrade/test-upgrade.sh" - - init_pool yes localnet_init_latest_stable_release "v0.12.0" - - log "*****************************************************************************************" - log "Running $UPGRADE_SHELL_TEST" - log "*****************************************************************************************" - - if bash 
"$UPGRADE_SHELL_TEST" &>${DETAILED_OUTPUT_TARGET}; then - rm dcld_mainnet_stable - log "$UPGRADE_SHELL_TEST finished successfully" - source integration_tests/upgrade/add-new-node-after-upgrade.sh - check_adding_new_node - else - log "$UPGRADE_SHELL_TEST failed" - exit 1 - fi - - cleanup_pool -fi \ No newline at end of file diff --git a/integration_tests/upgrade/01-test-upgrade-initialize-0.12.sh b/integration_tests/upgrade/01-test-upgrade-initialize-0.12.sh old mode 100644 new mode 100755 index df3da2975..91431fd2c --- a/integration_tests/upgrade/01-test-upgrade-initialize-0.12.sh +++ b/integration_tests/upgrade/01-test-upgrade-initialize-0.12.sh @@ -22,10 +22,10 @@ wget -O dcld-initial "https://github.com/zigbee-alliance/distributed-compliance- chmod ugo+x dcld-initial DCLD_BIN="./dcld-initial" +$DCLD_BIN config broadcast-mode block container="validator-demo" add_validator_node() { - # FIXME: as it's called before upgrade, mainnet stable version of dcld needs to be used (not the latest master) random_string account address="" LOCALNET_DIR=".localnet" @@ -41,15 +41,9 @@ add_validator_node() { passphrase="test1234" docker_network="distributed-compliance-ledger_localnet" - docker build -f Dockerfile-build -t dcld-build . 
- docker container create --name dcld-build-inst dcld-build - docker cp dcld-build-inst:/go/bin/dcld ./ - docker rm dcld-build-inst - docker run -d --name $container --ip $ip -p "$node_p2p_port-$node_client_port:26656-26657" --network $docker_network -i dcledger - docker cp ./dcld "$container":"$DCL_USER_HOME"/ - rm -f ./dcld + docker cp $DCLD_BIN "$container":"$DCL_USER_HOME"/dcld test_divider @@ -78,30 +72,30 @@ add_validator_node() { address="$(docker exec $container /bin/sh -c "echo $passphrase | ./dcld keys show $account -a")" pubkey="$(docker exec $container /bin/sh -c "echo $passphrase | ./dcld keys show $account -p")" - alice_address="$(dcld keys show alice -a)" - bob_address="$(dcld keys show bob -a)" - jack_address="$(dcld keys show jack -a)" + alice_address="$($DCLD_BIN keys show alice -a)" + bob_address="$($DCLD_BIN keys show bob -a)" + jack_address="$($DCLD_BIN keys show jack -a)" echo "Create account for $account and Assign NodeAdmin role" - echo $passphrase | dcld tx auth propose-add-account --address="$address" --pubkey="$pubkey" --roles="NodeAdmin" --from jack --yes - echo $passphrase | dcld tx auth approve-add-account --address="$address" --from alice --yes - echo $passphrase | dcld tx auth approve-add-account --address="$address" --from bob --yes + echo $passphrase | $DCLD_BIN tx auth propose-add-account --address="$address" --pubkey="$pubkey" --roles="NodeAdmin" --from jack --yes + echo $passphrase | $DCLD_BIN tx auth approve-add-account --address="$address" --from alice --yes + echo $passphrase | $DCLD_BIN tx auth approve-add-account --address="$address" --from bob --yes test_divider vaddress=$(docker exec $container ./dcld tendermint show-address) vpubkey=$(docker exec $container ./dcld tendermint show-validator) echo "Check pool response for yet unknown node \"$node_name\"" - result=$(dcld query validator node --address "$address") + result=$($DCLD_BIN query validator node --address "$address") check_response "$result" "Not Found" echo 
"$result" - result=$(dcld query validator last-power --address "$address") + result=$($DCLD_BIN query validator last-power --address "$address") check_response "$result" "Not Found" echo "$result" echo "$account Add Node \"$node_name\" to validator set" ! read -r -d '' _script << EOF - set -eu; echo test1234 | dcld tx validator add-node --pubkey='$vpubkey' --moniker="$node_name" --from="$account" --yes + set -eu; echo test1234 | $DCLD_BIN tx validator add-node --pubkey='$vpubkey' --moniker="$node_name" --from="$account" --yes EOF result="$(docker exec "$container" /bin/sh -c "echo test1234 | ./dcld tx validator add-node --pubkey='$vpubkey' --moniker="$node_name" --from="$account" --yes")" check_response "$result" "\"code\": 0" @@ -116,10 +110,10 @@ EOF docker exec $container cp -f ./dcld "$DCL_DIR"/cosmovisor/genesis/bin/ echo "$account Start Node \"$node_name\"" - docker exec -d $container cosmovisor start + docker exec -d $container cosmovisor run start sleep 10 - result=$(dcld query validator node --address "$address") + result=$($DCLD_BIN query validator node --address "$address") validator_address=$(echo "$result" | jq -r '.owner') echo "$result" } @@ -199,10 +193,18 @@ user_3_address=$(echo $passphrase | $DCLD_BIN keys show $user_3 -a) user_3_pubkey=$(echo $passphrase | $DCLD_BIN keys show $user_3 -p) echo "Create Vendor account $vendor_account" -create_new_vendor_account $vendor_account $vid +result="$(echo $passphrase | $DCLD_BIN keys add "$vendor_account")" +_address=$(echo $passphrase | $DCLD_BIN keys show $vendor_account -a) +_pubkey=$(echo $passphrase | $DCLD_BIN keys show $vendor_account -p) +result="$(echo $passphrase | $DCLD_BIN tx auth propose-add-account --address="$_address" --pubkey="$_pubkey" --vid="$vid" --roles="Vendor" --from jack --yes)" echo "Create CertificationCenter account" -create_new_account certification_center_account "CertificationCenter" +certification_center_account="certification_center_account_" +result="$(echo $passphrase | 
$DCLD_BIN keys add $certification_center_account)" +_address=$(echo $passphrase | $DCLD_BIN keys show $certification_center_account -a) +_pubkey=$(echo $passphrase | $DCLD_BIN keys show $certification_center_account -p) +result="$(echo $passphrase | $DCLD_BIN tx auth propose-add-account --address="$_address" --pubkey="$_pubkey" --roles="CertificationCenter" --from jack --yes)" +result="$(echo $passphrase | $DCLD_BIN tx auth approve-add-account --address="$_address" --from alice --yes)" random_string trustee_account_4 random_string trustee_account_5 diff --git a/integration_tests/upgrade/02-test-upgrade-0.12-to-1.2.sh b/integration_tests/upgrade/02-test-upgrade-0.12-to-1.2.sh old mode 100644 new mode 100755 index e40526815..c466c05e9 --- a/integration_tests/upgrade/02-test-upgrade-0.12-to-1.2.sh +++ b/integration_tests/upgrade/02-test-upgrade-0.12-to-1.2.sh @@ -721,14 +721,12 @@ test_divider # VALIDATOR_NODE echo "Disable node" -# FIXME: use proper binary (not dcld but $DCLD_BIN_OLD) result=$(docker exec "$container" /bin/sh -c "echo test1234 | dcld tx validator disable-node --from=$account --yes") check_response "$result" "\"code\": 0" test_divider echo "Enable node" -# FIXME: use proper binary (not dcld but $DCLD_BIN_OLD) result=$(docker exec "$container" /bin/sh -c "echo test1234 | dcld tx validator enable-node --from=$account --yes") check_response "$result" "\"code\": 0" @@ -753,7 +751,6 @@ check_response "$result" "\"code\": 0" test_divider echo "Enable node" -# FIXME: use proper binary (not dcld but $DCLD_BIN_OLD) result=$(docker exec "$container" /bin/sh -c "echo test1234 | dcld tx validator enable-node --from=$account --yes") check_response "$result" "\"code\": 0" @@ -768,7 +765,6 @@ test_divider # Validator echo "Get node" -# FIXME: use proper binary (not dcld but $DCLD_BIN_OLD) result=$(docker exec "$container" /bin/sh -c "echo test1234 | dcld query validator all-nodes") check_response "$result" "\"owner\": \"$validator_address\"" diff --git 
a/integration_tests/upgrade/03-test-upgrade-1.2-to-1.4.sh b/integration_tests/upgrade/03-test-upgrade-1.2-to-1.4.sh old mode 100644 new mode 100755 index 5a3161e3b..bf19731eb --- a/integration_tests/upgrade/03-test-upgrade-1.2-to-1.4.sh +++ b/integration_tests/upgrade/03-test-upgrade-1.2-to-1.4.sh @@ -19,9 +19,9 @@ source integration_tests/cli/common.sh # Upgrade constants plan_name="v1.4" -upgrade_checksum="sha256:d65ffe8eb8b2751a6746d5694c027d40f4ebdd0764549cbf6bb83b358138108f" +upgrade_checksum="sha256:1dc4a44041dcaf11545b7543dfe942f4742f568be68fee275441b092bb60deed" binary_version_old="v1.2.2" -binary_version_new="v1.4.0-dev1" +binary_version_new="v1.4.0-dev3" wget -O dcld_old "https://github.com/zigbee-alliance/distributed-compliance-ledger/releases/download/$binary_version_old/dcld" chmod ugo+x dcld_old @@ -31,7 +31,7 @@ chmod ugo+x dcld_new DCLD_BIN_OLD="./dcld_old" DCLD_BIN_NEW="./dcld_new" - +$DCLD_BIN_NEW config broadcast-mode sync ######################################################################################## # Upgrade to version 1.4 @@ -537,12 +537,18 @@ vendor_account_for_1_4="vendor_account_65521" echo "Create Vendor account $vendor_account_for_1_4" result="$(echo $passphrase | $DCLD_BIN_NEW keys add "$vendor_account_for_1_4")" +echo "keys add $result" _address=$(echo $passphrase | $DCLD_BIN_NEW keys show $vendor_account_for_1_4 -a) _pubkey=$(echo $passphrase | $DCLD_BIN_NEW keys show $vendor_account_for_1_4 -p) -result="$(echo $passphrase | $DCLD_BIN_NEW tx auth propose-add-account --address="$_address" --pubkey="$_pubkey" --vid="$vid_for_1_4" --roles="Vendor" --from "$trustee_account_1" --yes)" -result="$(echo $passphrase | $DCLD_BIN_NEW tx auth approve-add-account --address="$_address" --from "$trustee_account_2" --yes)" -result="$(echo $passphrase | $DCLD_BIN_NEW tx auth approve-add-account --address="$_address" --from "$trustee_account_3" --yes)" -result="$(echo $passphrase | $DCLD_BIN_NEW tx auth approve-add-account 
--address="$_address" --from "$trustee_account_4" --yes)" +result=$(echo $passphrase | $DCLD_BIN_NEW tx auth propose-add-account --address="$_address" --pubkey="$_pubkey" --vid="$vid_for_1_4" --roles="Vendor" --from "$trustee_account_1" --yes) +echo "propose-add-account $result" +result=$(get_txn_result "$result") +result=$(echo $passphrase | $DCLD_BIN_NEW tx auth approve-add-account --address="$_address" --from "$trustee_account_2" --yes) +result=$(get_txn_result "$result") +result=$(echo $passphrase | $DCLD_BIN_NEW tx auth approve-add-account --address="$_address" --from "$trustee_account_3" --yes) +result=$(get_txn_result "$result") +result=$(echo $passphrase | $DCLD_BIN_NEW tx auth approve-add-account --address="$_address" --from "$trustee_account_4" --yes) +result=$(get_txn_result "$result") random_string user_7 echo "$user_7 generates keys" @@ -570,12 +576,15 @@ user_9_pubkey=$(echo $passphrase | $DCLD_BIN_NEW keys show $user_9 -p) # VENDOR_INFO echo "Add vendor $vendor_name_for_1_4" result=$(echo $passphrase | $DCLD_BIN_NEW tx vendorinfo add-vendor --vid=$vid_for_1_4 --vendorName=$vendor_name_for_1_4 --companyLegalName=$company_legal_name_for_1_4 --companyPreferredName=$company_preferred_name_for_1_4 --vendorLandingPageURL=$vendor_landing_page_url_for_1_4 --from=$vendor_account_for_1_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Update vendor $vendor_name_for_1_2" result=$(echo $passphrase | $DCLD_BIN_NEW tx vendorinfo update-vendor --vid=$vid_for_1_2 --vendorName=$vendor_name_for_1_2 --companyLegalName=$company_legal_name_for_1_2 --companyPreferredName=$company_preferred_name_for_1_4 --vendorLandingPageURL=$vendor_landing_page_url_for_1_4 --from=$vendor_account_for_1_2 --yes) +result=$(get_txn_result "$result") +echo $result check_response "$result" "\"code\": 0" test_divider @@ -584,52 +593,61 @@ test_divider echo "Add model vid=$vid_for_1_4 pid=$pid_1_for_1_4" result=$(echo $passphrase | 
$DCLD_BIN_NEW tx model add-model --vid=$vid_for_1_4 --pid=$pid_1_for_1_4 --deviceTypeID=$device_type_id_for_1_4 --productName=$product_name_for_1_4 --productLabel=$product_label_for_1_4 --partNumber=$part_number_for_1_4 --from=$vendor_account_for_1_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Add model version vid=$vid_for_1_4 pid=$pid_1_for_1_4" result=$(echo $passphrase | $DCLD_BIN_NEW tx model add-model-version --vid=$vid_for_1_4 --pid=$pid_1_for_1_4 --softwareVersion=$software_version_for_1_4 --softwareVersionString=$software_version_string_for_1_4 --cdVersionNumber=$cd_version_number_for_1_4 --minApplicableSoftwareVersion=$min_applicable_software_version_for_1_4 --maxApplicableSoftwareVersion=$max_applicable_software_version_for_1_4 --from=$vendor_account_for_1_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Add model vid=$vid_for_1_4 pid=$pid_2_for_1_4" result=$(echo $passphrase | $DCLD_BIN_NEW tx model add-model --vid=$vid_for_1_4 --pid=$pid_2_for_1_4 --deviceTypeID=$device_type_id_for_1_4 --productName=$product_name_for_1_4 --productLabel=$product_label_for_1_4 --partNumber=$part_number_for_1_4 --from=$vendor_account_for_1_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Add model version vid=$vid_for_1_4 pid=$pid_2_for_1_4" result=$(echo $passphrase | $DCLD_BIN_NEW tx model add-model-version --vid=$vid_for_1_4 --pid=$pid_2_for_1_4 --softwareVersion=$software_version_for_1_4 --softwareVersionString=$software_version_string_for_1_4 --cdVersionNumber=$cd_version_number_for_1_4 --minApplicableSoftwareVersion=$min_applicable_software_version_for_1_4 --maxApplicableSoftwareVersion=$max_applicable_software_version_for_1_4 --from=$vendor_account_for_1_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Add model vid=$vid_for_1_4 pid=$pid_3_for_1_4" 
result=$(echo $passphrase | $DCLD_BIN_NEW tx model add-model --vid=$vid_for_1_4 --pid=$pid_3_for_1_4 --deviceTypeID=$device_type_id_for_1_4 --productName=$product_name_for_1_4 --productLabel=$product_label_for_1_4 --partNumber=$part_number_for_1_4 --from=$vendor_account_for_1_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" echo "Add model version vid=$vid_for_1_4 pid=$pid_3_for_1_4" result=$(echo $passphrase | $DCLD_BIN_NEW tx model add-model-version --vid=$vid_for_1_4 --pid=$pid_3_for_1_4 --softwareVersion=$software_version_for_1_4 --softwareVersionString=$software_version_string_for_1_4 --cdVersionNumber=$cd_version_number_for_1_4 --minApplicableSoftwareVersion=$min_applicable_software_version_for_1_4 --maxApplicableSoftwareVersion=$max_applicable_software_version_for_1_4 --from=$vendor_account_for_1_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Delete model vid=$vid_for_1_4 pid=$pid_3_for_1_4" result=$(echo $passphrase | $DCLD_BIN_NEW tx model delete-model --vid=$vid_for_1_4 --pid=$pid_3_for_1_4 --from=$vendor_account_for_1_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Update model vid=$vid pid=$pid_2" result=$(echo $passphrase | $DCLD_BIN_NEW tx model update-model --vid=$vid --pid=$pid_2 --productName=$product_name --productLabel=$product_label_for_1_4 --partNumber=$part_number_for_1_4 --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Update model version vid=$vid pid=$pid_2" result=$(echo $passphrase | $DCLD_BIN_NEW tx model update-model-version --vid=$vid --pid=$pid_2 --softwareVersion=$software_version --minApplicableSoftwareVersion=$min_applicable_software_version_for_1_4 --maxApplicableSoftwareVersion=$max_applicable_software_version_for_1_4 --from=$vendor_account --yes) +result=$(get_txn_result "$result") check_response "$result" 
"\"code\": 0" test_divider @@ -638,24 +656,28 @@ test_divider echo "Certify model vid=$vid_for_1_4 pid=$pid_1_for_1_4" result=$(echo $passphrase | $DCLD_BIN_NEW tx compliance certify-model --vid=$vid_for_1_4 --pid=$pid_1_for_1_4 --softwareVersion=$software_version_for_1_4 --softwareVersionString=$software_version_string_for_1_4 --certificationType=$certification_type_for_1_4 --certificationDate=$certification_date_for_1_4 --cdCertificateId=$cd_certificate_id_for_1_4 --from=$certification_center_account --cdVersionNumber=$cd_version_number_for_1_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Provision model vid=$vid_for_1_4 pid=$pid_2_for_1_4" result=$(echo $passphrase | $DCLD_BIN_NEW tx compliance provision-model --vid=$vid_for_1_4 --pid=$pid_2_for_1_4 --softwareVersion=$software_version_for_1_4 --softwareVersionString=$software_version_string_for_1_4 --certificationType=$certification_type_for_1_4 --provisionalDate=$provisional_date_for_1_4 --cdCertificateId=$cd_certificate_id_for_1_4 --from=$certification_center_account --cdVersionNumber=$cd_version_number_for_1_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Certify model vid=$vid_for_1_4 pid=$pid_2_for_1_4" result=$(echo $passphrase | $DCLD_BIN_NEW tx compliance certify-model --vid=$vid_for_1_4 --pid=$pid_2_for_1_4 --softwareVersion=$software_version_for_1_4 --softwareVersionString=$software_version_string_for_1_4 --certificationType=$certification_type_for_1_4 --certificationDate=$certification_date_for_1_4 --cdCertificateId=$cd_certificate_id_for_1_4 --from=$certification_center_account --cdVersionNumber=$cd_version_number_for_1_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Revoke model certification vid=$vid_for_1_4 pid=$pid_2_for_1_4" result=$(echo $passphrase | $DCLD_BIN_NEW tx compliance revoke-model --vid=$vid_for_1_4 --pid=$pid_2_for_1_4 
--softwareVersion=$software_version_for_1_4 --softwareVersionString=$software_version_string_for_1_4 --certificationType=$certification_type_for_1_4 --revocationDate=$certification_date_for_1_4 --from=$certification_center_account --cdVersionNumber=$cd_version_number_for_1_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -664,126 +686,147 @@ test_divider echo "Propose add root_cert_with_vid" result=$(echo $passphrase | $DCLD_BIN_NEW tx pki propose-add-x509-root-cert --certificate="$root_cert_with_vid_path" --vid="$vid_for_1_4" --from=$trustee_account_1 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Approve add root_cert_with_vid" result=$(echo $passphrase | $DCLD_BIN_NEW tx pki approve-add-x509-root-cert --subject="$root_cert_with_vid_subject" --subject-key-id=$root_cert_with_vid_subject_key_id --from=$trustee_account_2 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "reject add root_cert_with_vid" result=$(echo $passphrase | $DCLD_BIN_NEW tx pki reject-add-x509-root-cert --subject="$root_cert_with_vid_subject" --subject-key-id=$root_cert_with_vid_subject_key_id --from=$trustee_account_3 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Approve add root_cert_with_vid" result=$(echo $passphrase | $DCLD_BIN_NEW tx pki approve-add-x509-root-cert --subject="$root_cert_with_vid_subject" --subject-key-id=$root_cert_with_vid_subject_key_id --from=$trustee_account_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Approve add root_cert_with_vid" result=$(echo $passphrase | $DCLD_BIN_NEW tx pki approve-add-x509-root-cert --subject="$root_cert_with_vid_subject" --subject-key-id=$root_cert_with_vid_subject_key_id --from=$trustee_account_5 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" 
test_divider echo "Propose add paa_cert_no_vid" result=$(echo $passphrase | $DCLD_BIN_NEW tx pki propose-add-x509-root-cert --certificate="$paa_cert_no_vid_path" --vid="$vid_for_1_4" --from=$trustee_account_5 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Approve add paa_cert_no_vid" result=$(echo $passphrase | $DCLD_BIN_NEW tx pki approve-add-x509-root-cert --subject="$paa_cert_no_vid_subject" --subject-key-id=$paa_cert_no_vid_subject_key_id --from=$trustee_account_1 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Approve add paa_cert_no_vid" result=$(echo $passphrase | $DCLD_BIN_NEW tx pki approve-add-x509-root-cert --subject="$paa_cert_no_vid_subject" --subject-key-id=$paa_cert_no_vid_subject_key_id --from=$trustee_account_2 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Approve add paa_cert_no_vid" result=$(echo $passphrase | $DCLD_BIN_NEW tx pki approve-add-x509-root-cert --subject="$paa_cert_no_vid_subject" --subject-key-id=$paa_cert_no_vid_subject_key_id --from=$trustee_account_3 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Propose root_cert" result=$(echo $passphrase | $DCLD_BIN_NEW tx pki propose-add-x509-root-cert --certificate="$root_cert_path" --vid="$vid_for_1_4" --from=$trustee_account_1 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Add intermediate_cert_with_vid" result=$(echo $passphrase | $DCLD_BIN_NEW tx pki add-x509-cert --certificate="$intermediate_cert_with_vid_path" --from=$vendor_account_for_1_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Revoke intermediate_cert_with_vid" result=$(echo $passphrase | $DCLD_BIN_NEW tx pki revoke-x509-cert --subject="$intermediate_cert_with_vid_subject" 
--subject-key-id="$intermediate_cert_with_vid_subject_key_id" --serial-number="$intermediate_cert_with_vid_serial_number" --from=$vendor_account_for_1_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Propose revoke paa_cert_no_vid" result=$(echo "$passphrase" | $DCLD_BIN_NEW tx pki propose-revoke-x509-root-cert --subject="$paa_cert_no_vid_subject" --subject-key-id="$paa_cert_no_vid_subject_key_id" --from="$trustee_account_1" --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Approve revoke paa_cert_no_vid" result=$(echo "$passphrase" | $DCLD_BIN_NEW tx pki approve-revoke-x509-root-cert --subject="$paa_cert_no_vid_subject" --subject-key-id="$paa_cert_no_vid_subject_key_id" --from="$trustee_account_2" --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Approve revoke paa_cert_no_vid_path" result=$(echo "$passphrase" | $DCLD_BIN_NEW tx pki approve-revoke-x509-root-cert --subject="$paa_cert_no_vid_subject" --subject-key-id="$paa_cert_no_vid_subject_key_id" --from="$trustee_account_3" --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Approve revoke paa_cert_no_vid_path" result=$(echo "$passphrase" | $DCLD_BIN_NEW tx pki approve-revoke-x509-root-cert --subject="$paa_cert_no_vid_subject" --subject-key-id="$paa_cert_no_vid_subject_key_id" --from="$trustee_account_4" --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Propose revoke root_cert_with_vid" result=$(echo $passphrase | $DCLD_BIN_NEW tx pki propose-revoke-x509-root-cert --subject="$root_cert_with_vid_subject" --subject-key-id="$root_cert_with_vid_subject_key_id" --from $trustee_account_1 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Add NOC Root certificate by vendor with VID = $vid_for_1_4" result=$(echo 
"$passphrase" | $DCLD_BIN_NEW tx pki add-noc-x509-root-cert --certificate="$noc_root_cert_1_path" --from $vendor_account_for_1_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Add NOC ICA certificate by vendor with VID = $vid_for_1_4" result=$(echo "$passphrase" | $DCLD_BIN_NEW tx pki add-noc-x509-ica-cert --certificate="$noc_ica_cert_1_path" --from $vendor_account_for_1_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Revoke NOC root certificate by vendor with VID = $vid_for_1_4" result=$(echo "$passphrase" | $DCLD_BIN_NEW tx pki revoke-noc-x509-root-cert --subject="$noc_root_cert_1_subject" --subject-key-id="$noc_root_cert_1_subject_key_id" --from=$vendor_account_for_1_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Revoke NOC ICA certificate by vendor with VID = $vid_for_1_4" result=$(echo "$passphrase" | $DCLD_BIN_NEW tx pki revoke-noc-x509-ica-cert --subject="$noc_ica_cert_1_subject" --subject-key-id="$noc_ica_cert_1_subject_key_id" --from=$vendor_account_for_1_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -792,6 +835,7 @@ test_divider echo "Add new revocation point for" result=$(echo $passphrase | $DCLD_BIN_NEW tx pki add-revocation-point --vid=$vid_for_1_4 --revocation-type=1 --is-paa="true" --certificate="$root_cert_with_vid_path" --label="$product_label_for_1_4" --data-url="$test_data_url_for_1_4" --issuer-subject-key-id=$issuer_subject_key_id --from=$vendor_account_for_1_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -799,18 +843,21 @@ test_divider echo "Update revocation point" result=$(echo $passphrase | $DCLD_BIN_NEW tx pki update-revocation-point --vid=$vid_for_1_4 --certificate="$root_cert_with_vid_path" --label="$product_label_for_1_4" --data-url="$test_data_url_for_1_4/new" 
--issuer-subject-key-id=$issuer_subject_key_id --from=$vendor_account_for_1_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Delete revocation point" result=$(echo $passphrase | $DCLD_BIN_NEW tx pki delete-revocation-point --vid=$vid_for_1_4 --label="$product_label_for_1_4" --issuer-subject-key-id=$issuer_subject_key_id --from=$vendor_account_for_1_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Add new revocation point" result=$(echo $passphrase | $DCLD_BIN_NEW tx pki add-revocation-point --vid=$vid_for_1_4 --revocation-type=1 --is-paa="true" --certificate="$root_cert_with_vid_path" --label="$product_label_for_1_4" --data-url="$test_data_url_for_1_4" --issuer-subject-key-id=$issuer_subject_key_id --from=$vendor_account_for_1_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider @@ -818,142 +865,164 @@ test_divider echo "Add revocation point for CRL SIGNER CERTIFICATE delegated by PAI" result=$(echo $passphrase | $DCLD_BIN_NEW tx pki add-revocation-point --vid=$vid_for_1_4 --is-paa="false" --certificate="$crl_signer_delegated_by_pai_1" --label="$product_label_for_1_4" --data-url="$test_data_url_for_1_4" --issuer-subject-key-id=$delegator_cert_with_vid_subject_key_id --revocation-type=1 --certificate-delegator="$delegator_cert_with_vid_65521_path" --from=$vendor_account_for_1_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Update revocation point for CRL SIGNER CERTIFICATE delegated by PAI" result=$(echo $passphrase | $DCLD_BIN_NEW tx pki update-revocation-point --vid=$vid_for_1_4 --certificate="$crl_signer_delegated_by_pai_1" --label="$product_label_for_1_4" --data-url="$test_data_url_for_1_4/new" --issuer-subject-key-id=$delegator_cert_with_vid_subject_key_id --certificate-delegator="$delegator_cert_with_vid_65521_path" --from=$vendor_account_for_1_4 --yes) 
+result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" # AUTH echo "Propose add account $user_7_address" result=$(echo $passphrase | $DCLD_BIN_NEW tx auth propose-add-account --address="$user_7_address" --pubkey="$user_7_pubkey" --roles="CertificationCenter" --from="$trustee_account_1" --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Approve add account $user_7_address" result=$($DCLD_BIN_NEW tx auth approve-add-account --address="$user_7_address" --from=$trustee_account_2 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Approve add account $user_7_address" result=$($DCLD_BIN_NEW tx auth approve-add-account --address="$user_7_address" --from=$trustee_account_3 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Approve add account $user_7_address" result=$($DCLD_BIN_NEW tx auth approve-add-account --address="$user_7_address" --from=$trustee_account_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Propose add account $user_8_address" result=$(echo $passphrase | $DCLD_BIN_NEW tx auth propose-add-account --address="$user_8_address" --pubkey=$user_8_pubkey --roles=CertificationCenter --from=$trustee_account_1 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Approve add account $user_8_address" result=$(echo $passphrase | $DCLD_BIN_NEW tx auth approve-add-account --address="$user_8_address" --from=$trustee_account_2 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Approve add account $user_8_address" result=$($DCLD_BIN_NEW tx auth approve-add-account --address="$user_8_address" --from=$trustee_account_3 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Approve add account $user_8_address" 
result=$($DCLD_BIN_NEW tx auth approve-add-account --address="$user_8_address" --from=$trustee_account_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Propose add account $user_9_address" result=$(echo $passphrase | $DCLD_BIN_NEW tx auth propose-add-account --address="$user_9_address" --pubkey=$user_9_pubkey --roles=CertificationCenter --from=$trustee_account_1 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Propose revoke account $user_7_address" result=$(echo $passphrase | $DCLD_BIN_NEW tx auth propose-revoke-account --address="$user_7_address" --from=$trustee_account_1 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Approve revoke account $user_7_address" result=$(echo $passphrase | $DCLD_BIN_NEW tx auth approve-revoke-account --address="$user_7_address" --from=$trustee_account_2 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Approve revoke account $user_7_address" result=$(echo $passphrase | $DCLD_BIN_NEW tx auth approve-revoke-account --address="$user_7_address" --from=$trustee_account_3 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Approve revoke account $user_7_address" result=$(echo $passphrase | $DCLD_BIN_NEW tx auth approve-revoke-account --address="$user_7_address" --from=$trustee_account_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Propose revoke account $user_8_address" result=$(echo $passphrase | $DCLD_BIN_NEW tx auth propose-revoke-account --address="$user_8_address" --from=$trustee_account_1 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider # VALIDATOR_NODE +result=$(docker exec "$container" /bin/sh -c "echo test1234 | dcld config broadcast-mode sync") + echo "Disable node" -# FIXME: 
use proper binary (not dcld but $DCLD_BIN_OLD) result=$(docker exec "$container" /bin/sh -c "echo test1234 | dcld tx validator disable-node --from=$account --yes") +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Enable node" -# FIXME: use proper binary (not dcld but $DCLD_BIN_OLD) result=$(docker exec "$container" /bin/sh -c "echo test1234 | dcld tx validator enable-node --from=$account --yes") +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Approve disable node" result=$(echo $passphrase | $DCLD_BIN_NEW tx validator approve-disable-node --address=$validator_address --from=$trustee_account_2 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Approve disable node" result=$(echo $passphrase | $DCLD_BIN_NEW tx validator approve-disable-node --address=$validator_address --from=$trustee_account_3 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Approve disable node" result=$(echo $passphrase | $DCLD_BIN_NEW tx validator approve-disable-node --address=$validator_address --from=$trustee_account_4 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Enable node" -# FIXME: use proper binary (not dcld but $DCLD_BIN_OLD) result=$(docker exec "$container" /bin/sh -c "echo test1234 | dcld tx validator enable-node --from=$account --yes") +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider echo "Propose disable node" -result=$(echo $passphrase | $DCLD_BIN_OLD tx validator propose-disable-node --address=$validator_address --from=$trustee_account_1 --yes) +result=$(echo $passphrase | $DCLD_BIN_NEW tx validator propose-disable-node --address=$validator_address --from=$trustee_account_1 --yes) +result=$(get_txn_result "$result") check_response "$result" "\"code\": 0" test_divider diff --git 
a/integration_tests/upgrade/add-new-node-after-upgrade.sh b/integration_tests/upgrade/add-new-node-after-upgrade.sh old mode 100644 new mode 100755 index 62ff587dc..c675ba4d9 --- a/integration_tests/upgrade/add-new-node-after-upgrade.sh +++ b/integration_tests/upgrade/add-new-node-after-upgrade.sh @@ -50,7 +50,7 @@ trap cleanup EXIT check_adding_new_node() { local stable_binary_version="${1:-0.12.1}" - local latest_binary_version="${2:-1.4.0-dev1}" + local latest_binary_version="${2:-1.4.0-dev3}" echo "1. run $node_name container" docker run -d --name $node_name --ip $ip -p "$node_p2p_port-$node_client_port:26656-26657" --network $docker_network -i dcledger @@ -81,7 +81,7 @@ check_adding_new_node() { test_divider echo "5. Start Node \"$node_name\"" - docker exec -d $node_name cosmovisor start + docker exec -d $node_name cosmovisor run start test_divider diff --git a/integration_tests/upgrade/test-upgrade.sh b/integration_tests/upgrade/test-upgrade.sh old mode 100644 new mode 100755 diff --git a/integration_tests/utils/client.go b/integration_tests/utils/client.go index 05e5bc878..5c6a23307 100644 --- a/integration_tests/utils/client.go +++ b/integration_tests/utils/client.go @@ -17,7 +17,7 @@ package utils import ( "bytes" "fmt" - "io/ioutil" + "io" "net/http" "strings" ) @@ -103,5 +103,5 @@ func sendRequest(uri string, method string, body []byte, account string, passphr func ReadResponseBody(resp *http.Response) ([]byte, error) { defer resp.Body.Close() - return ioutil.ReadAll(resp.Body) + return io.ReadAll(resp.Body) } diff --git a/integration_tests/utils/common.go b/integration_tests/utils/common.go index 302baf5e0..49f8a1ec3 100644 --- a/integration_tests/utils/common.go +++ b/integration_tests/utils/common.go @@ -22,7 +22,7 @@ import ( ) func RandString() string { - rand.Seed(time.Now().UnixNano()) + rand.Seed(time.Now().UnixNano()) //nolint:staticcheck chars := []rune("ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789") diff --git a/integration_tests/utils/grpc_rest.go 
b/integration_tests/utils/grpc_rest.go index 0e9d98cd3..2ce8c4cc7 100644 --- a/integration_tests/utils/grpc_rest.go +++ b/integration_tests/utils/grpc_rest.go @@ -18,23 +18,28 @@ import ( "bufio" "context" "errors" + "fmt" "os" "path/filepath" + "strings" "testing" + "time" + "github.com/cosmos/gogoproto/proto" + "github.com/stretchr/testify/require" + "google.golang.org/grpc" + "google.golang.org/grpc/credentials/insecure" + + sdkerrors "cosmossdk.io/errors" clienttx "github.com/cosmos/cosmos-sdk/client/tx" + "github.com/cosmos/cosmos-sdk/codec" "github.com/cosmos/cosmos-sdk/crypto/keyring" - "github.com/cosmos/cosmos-sdk/simapp" - simappparams "github.com/cosmos/cosmos-sdk/simapp/params" sdk "github.com/cosmos/cosmos-sdk/types" - sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/cosmos/cosmos-sdk/types/tx" - //nolint:staticcheck - "github.com/golang/protobuf/proto" - "github.com/stretchr/testify/require" + "github.com/zigbee-alliance/distributed-compliance-ledger/app" + appparams "github.com/zigbee-alliance/distributed-compliance-ledger/app/params" dclauthtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" - "google.golang.org/grpc" ) // NOTE @@ -44,7 +49,7 @@ import ( type TestSuite struct { T *testing.T - EncodingConfig simappparams.EncodingConfig + EncodingConfig appparams.EncodingConfig ChainID string Kr keyring.Keyring Txf clienttx.Factory @@ -54,8 +59,9 @@ type TestSuite struct { func (suite *TestSuite) GetGRPCConn() *grpc.ClientConn { // Create a connection to the gRPC server. grpcConn, err := grpc.Dial( - "127.0.0.1:26630", // Or your gRPC server address. - grpc.WithInsecure(), // The SDK doesn't support any transport security mechanism. + "127.0.0.1:26630", // Or your gRPC server address. + grpc.WithTransportCredentials(insecure.NewCredentials()), // The SDK doesn't support any transport security mechanism. 
+ grpc.WithDefaultCallOptions(grpc.ForceCodec(codec.NewProtoCodec(suite.EncodingConfig.InterfaceRegistry).GRPCCodec())), ) require.NoError(suite.T, err) @@ -71,10 +77,10 @@ func SetupTest(t *testing.T, chainID string, rest bool) (suite TestSuite) { require.NoError(t, err) homeDir := filepath.Join(userHomeDir, ".dcl") + encConfig := app.MakeEncodingConfig() - kr, _ := keyring.New(sdk.KeyringServiceName(), keyring.BackendTest, homeDir, inBuf) + kr, _ := keyring.New(sdk.KeyringServiceName(), keyring.BackendTest, homeDir, inBuf, encConfig.Codec) - encConfig := simapp.MakeTestEncodingConfig() dclauthtypes.RegisterInterfaces(encConfig.InterfaceRegistry) txCfg := encConfig.TxConfig @@ -98,7 +104,9 @@ func (suite *TestSuite) GetAddress(uid string) sdk.AccAddress { signerInfo, err := suite.Kr.Key(uid) require.NoError(suite.T, err) - return signerInfo.GetAddress() + address, _ := signerInfo.GetAddress() + + return address } // Generates Protobuf-encoded bytes. @@ -129,27 +137,40 @@ func (suite *TestSuite) BuildTx( return txBytes } +//nolint:nosnakecase func (suite *TestSuite) BroadcastTx(txBytes []byte) (*sdk.TxResponse, error) { - var broadcastResp *tx.BroadcastTxResponse + var txResponse *tx.GetTxResponse var err error body := tx.BroadcastTxRequest{ - Mode: tx.BroadcastMode_BROADCAST_MODE_BLOCK, + Mode: tx.BroadcastMode_BROADCAST_MODE_SYNC, //nolint:nosnakecase TxBytes: txBytes, } - if suite.Rest { - var _resp tx.BroadcastTxResponse + if suite.Rest { //nolint:nestif + var _getResp tx.GetTxResponse + var _brdResp tx.BroadcastTxResponse - bodyBytes, err := suite.EncodingConfig.Marshaler.MarshalJSON(&body) + bodyBytes, err := suite.EncodingConfig.Codec.MarshalJSON(&body) require.NoError(suite.T, err) respBytes, err := SendPostRequest("/cosmos/tx/v1beta1/txs", bodyBytes, "", "") + require.NoError(suite.T, err) + require.NoError(suite.T, suite.EncodingConfig.Codec.UnmarshalJSON(respBytes, &_brdResp)) + + for i := 1; i <= 10; i++ { + respBytes, err = 
SendGetRequest(fmt.Sprintf("/cosmos/tx/v1beta1/txs/%s", _brdResp.GetTxResponse().TxHash)) + if err == nil { + require.NoError(suite.T, suite.EncodingConfig.Codec.UnmarshalJSON(respBytes, &_getResp)) + + break + } + time.Sleep(2 * time.Second) + } if err != nil { return nil, err } - require.NoError(suite.T, suite.EncodingConfig.Marshaler.UnmarshalJSON(respBytes, &_resp)) - broadcastResp = &_resp + txResponse = &_getResp } else { grpcConn := suite.GetGRPCConn() defer grpcConn.Close() @@ -157,13 +178,24 @@ func (suite *TestSuite) BroadcastTx(txBytes []byte) (*sdk.TxResponse, error) { // Broadcast the tx via gRPC. We create a new client for the Protobuf Tx // service. txClient := tx.NewServiceClient(grpcConn) - broadcastResp, err = txClient.BroadcastTx(context.Background(), &body) + broadCastResponse, err := txClient.BroadcastTx(context.Background(), &body) + if err != nil { + return nil, err + } + + for i := 1; i <= 10; i++ { + txResponse, err = txClient.GetTx(context.Background(), &tx.GetTxRequest{Hash: broadCastResponse.GetTxResponse().TxHash}) + if err == nil && !strings.Contains(txResponse.TxResponse.RawLog, "tx not found") { + break + } + time.Sleep(2 * time.Second) + } if err != nil { return nil, err } } - resp := broadcastResp.TxResponse + resp := txResponse.TxResponse if resp.Code != 0 { err = sdkerrors.ABCIError(resp.Codespace, resp.Code, resp.RawLog) @@ -188,20 +220,21 @@ func (suite *TestSuite) QueryREST(uri string, resp proto.Message) error { return err } - require.NoError(suite.T, suite.EncodingConfig.Marshaler.UnmarshalJSON(respBytes, resp)) + require.NoError(suite.T, suite.EncodingConfig.Codec.UnmarshalJSON(respBytes, resp)) return nil } func (suite *TestSuite) AssertNotFound(err error) { require.Error(suite.T, err) - require.Contains(suite.T, err.Error(), "rpc error: code = NotFound desc = not found") + require.Contains(suite.T, err.Error(), "not found") if suite.Rest { var resterr *RESTError if !errors.As(err, &resterr) { panic("REST error is not 
RESTError type") } - require.Equal(suite.T, resterr.resp.StatusCode, 404) + + require.Equal(suite.T, 404, resterr.resp.StatusCode) } } diff --git a/integration_tests/utils/sign_helper.go b/integration_tests/utils/sign_helper.go index 9806fb3c1..d22584cd5 100644 --- a/integration_tests/utils/sign_helper.go +++ b/integration_tests/utils/sign_helper.go @@ -7,9 +7,9 @@ import ( ) // GenTx generates a signed mock transaction. -func GenTx(txf clienttx.Factory, gen client.TxConfig, msgs []sdk.Msg, signer string) (sdk.Tx, error) { +func GenTx(txf clienttx.Factory, _ client.TxConfig, msgs []sdk.Msg, signer string) (sdk.Tx, error) { // tx, err := txf.BuildUnsignedTx(msgs...) - tx, err := clienttx.BuildUnsignedTx(txf, msgs...) + tx, err := txf.BuildUnsignedTx(msgs...) if err != nil { return nil, err } diff --git a/proto/buf.gen.gogo.yaml b/proto/buf.gen.gogo.yaml new file mode 100644 index 000000000..0ace1a95a --- /dev/null +++ b/proto/buf.gen.gogo.yaml @@ -0,0 +1,18 @@ +# This file is auto-generated from Ignite. You can edit +# the file content but do not change the file name or path. +# +# buf.gen.gogo.yaml +# +version: v1 +plugins: + - name: gocosmos + out: .. + opt: + - plugins=grpc + - Mgoogle/protobuf/any.proto=github.com/cosmos/cosmos-sdk/codec/types + - Mcosmos/orm/v1/orm.proto=cosmossdk.io/orm + - name: grpc-gateway + out: .. + opt: + - logtostderr=true + - allow_colon_final_segments=true diff --git a/proto/buf.gen.pulsar.yaml b/proto/buf.gen.pulsar.yaml new file mode 100644 index 000000000..1a203e60a --- /dev/null +++ b/proto/buf.gen.pulsar.yaml @@ -0,0 +1,18 @@ +# This file is auto-generated from Ignite. You can edit +# the file content but do not change the file name or path. 
+# +# buf.gen.pulsar.yaml +# +version: v1 +managed: + enabled: true + go_package_prefix: + default: github.com/zigbee-alliance/distributed-compliance-ledger + except: + - buf.build/googleapis/googleapis + - buf.build/cosmos/gogo-proto + - buf.build/cosmos/cosmos-proto + override: +plugins: + - name: go-grpc + out: .. diff --git a/proto/buf.gen.sta.yaml b/proto/buf.gen.sta.yaml new file mode 100644 index 000000000..aa88b8330 --- /dev/null +++ b/proto/buf.gen.sta.yaml @@ -0,0 +1,15 @@ +# This file is auto-generated from Ignite. You can edit +# the file content but do not change the file name or path. +# +# buf.gen.sta.yaml +# +version: v1 +plugins: + - name: openapiv2 + out: . + opt: + - logtostderr=true + - openapi_naming_strategy=simple + - ignore_comments=true + - simple_operation_ids=true + - json_names_for_fields=false diff --git a/proto/buf.gen.swagger.yaml b/proto/buf.gen.swagger.yaml new file mode 100644 index 000000000..6c6e0fb4b --- /dev/null +++ b/proto/buf.gen.swagger.yaml @@ -0,0 +1,15 @@ +# This file is auto-generated from Ignite. You can edit +# the file content but do not change the file name or path. +# +# buf.gen.swagger.yaml +# +version: v1 +plugins: + - name: openapiv2 + out: . + opt: + - logtostderr=true + - openapi_naming_strategy=fqn + - json_names_for_fields=false + - simple_operation_ids=true + - generate_unbound_methods=true \ No newline at end of file diff --git a/proto/buf.gen.ts.yaml b/proto/buf.gen.ts.yaml new file mode 100644 index 000000000..84059c1f9 --- /dev/null +++ b/proto/buf.gen.ts.yaml @@ -0,0 +1,21 @@ +# This file is auto-generated from Ignite. You can edit +# the file content but do not change the file name or path. +# +# buf.gen.ts.yaml +# +version: v1 +managed: + enabled: true +plugins: + - plugin: buf.build/community/stephenh-ts-proto + out: . 
+ strategy: all + opt: + - logtostderr=true + - allow_merge=true + - json_names_for_fields=false + - ts_proto_opt=snakeToCamel=true + - ts_proto_opt=Mgoogle/protobuf/any.proto=github.com/cosmos/cosmos-sdk/codec/types + - ts_proto_opt=Mcosmos/orm/v1/orm.proto=cosmossdk.io/orm + - ts_proto_opt=esModuleInterop=true + - ts_proto_out=. diff --git a/proto/buf.lock b/proto/buf.lock new file mode 100644 index 000000000..77287f75c --- /dev/null +++ b/proto/buf.lock @@ -0,0 +1,31 @@ +# This file is auto-generated from Ignite. +# DO NOT EDIT +# +# buf.lock +# +version: v1 +deps: + - remote: buf.build + owner: cosmos + repository: cosmos-proto + commit: 1935555c206d4afb9e94615dfd0fad31 + - remote: buf.build + owner: cosmos + repository: cosmos-sdk + commit: 954f7b05f38440fc8250134b15adec47 + - remote: buf.build + owner: cosmos + repository: gogo-proto + commit: 34d970b699f84aa382f3c29773a60836 + - remote: buf.build + owner: cosmos + repository: ics23 + commit: 3c44d8daa8b44059ac744cd17d4a49d7 + - remote: buf.build + owner: googleapis + repository: googleapis + commit: 75b4300737fb4efca0831636be94e517 + - remote: buf.build + owner: protocolbuffers + repository: wellknowntypes + commit: 44e83bc050a4497fa7b36b34d95ca156 diff --git a/proto/buf.yaml b/proto/buf.yaml new file mode 100644 index 000000000..7a86adcfa --- /dev/null +++ b/proto/buf.yaml @@ -0,0 +1,29 @@ +# This file is auto-generated from Ignite. You can edit +# the file content but do not change the file name or path. 
+# +# buf.yaml +# +version: v1 +deps: + - buf.build/protocolbuffers/wellknowntypes + - buf.build/cosmos/cosmos-sdk + - buf.build/cosmos/cosmos-proto + - buf.build/cosmos/gogo-proto + - buf.build/googleapis/googleapis + - buf.build/cosmos/ics23 +breaking: + use: + - FILE +lint: + use: + - DEFAULT + - COMMENTS + - FILE_LOWER_SNAKE_CASE + except: + - UNARY_RPC + - COMMENT_FIELD + - SERVICE_SUFFIX + - PACKAGE_VERSION_SUFFIX + - RPC_REQUEST_STANDARD_NAME + ignore: + - tendermint diff --git a/proto/common/uint16_range.proto b/proto/zigbeealliance/distributedcomplianceledger/common/uint16_range.proto similarity index 100% rename from proto/common/uint16_range.proto rename to proto/zigbeealliance/distributedcomplianceledger/common/uint16_range.proto diff --git a/proto/compliance/certified_model.proto b/proto/zigbeealliance/distributedcomplianceledger/compliance/certified_model.proto similarity index 100% rename from proto/compliance/certified_model.proto rename to proto/zigbeealliance/distributedcomplianceledger/compliance/certified_model.proto diff --git a/proto/compliance/compliance_history_item.proto b/proto/zigbeealliance/distributedcomplianceledger/compliance/compliance_history_item.proto similarity index 100% rename from proto/compliance/compliance_history_item.proto rename to proto/zigbeealliance/distributedcomplianceledger/compliance/compliance_history_item.proto diff --git a/proto/compliance/compliance_info.proto b/proto/zigbeealliance/distributedcomplianceledger/compliance/compliance_info.proto similarity index 91% rename from proto/compliance/compliance_info.proto rename to proto/zigbeealliance/distributedcomplianceledger/compliance/compliance_info.proto index 2e3c2aa86..46469870f 100644 --- a/proto/compliance/compliance_info.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/compliance/compliance_info.proto @@ -3,7 +3,7 @@ package zigbeealliance.distributedcomplianceledger.compliance; option go_package = 
"github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types"; -import "compliance/compliance_history_item.proto"; +import "zigbeealliance/distributedcomplianceledger/compliance/compliance_history_item.proto"; import "cosmos_proto/cosmos.proto"; message ComplianceInfo { diff --git a/proto/compliance/device_software_compliance.proto b/proto/zigbeealliance/distributedcomplianceledger/compliance/device_software_compliance.proto similarity index 77% rename from proto/compliance/device_software_compliance.proto rename to proto/zigbeealliance/distributedcomplianceledger/compliance/device_software_compliance.proto index fa3ff6a32..550f5b624 100644 --- a/proto/compliance/device_software_compliance.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/compliance/device_software_compliance.proto @@ -3,7 +3,7 @@ package zigbeealliance.distributedcomplianceledger.compliance; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types"; -import "compliance/compliance_info.proto"; +import "zigbeealliance/distributedcomplianceledger/compliance/compliance_info.proto"; message DeviceSoftwareCompliance { string cDCertificateId = 1; diff --git a/proto/compliance/genesis.proto b/proto/zigbeealliance/distributedcomplianceledger/compliance/genesis.proto similarity index 66% rename from proto/compliance/genesis.proto rename to proto/zigbeealliance/distributedcomplianceledger/compliance/genesis.proto index 8f5c68118..0c0a55a7d 100644 --- a/proto/compliance/genesis.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/compliance/genesis.proto @@ -1,11 +1,11 @@ syntax = "proto3"; package zigbeealliance.distributedcomplianceledger.compliance; -import "compliance/compliance_info.proto"; -import "compliance/certified_model.proto"; -import "compliance/revoked_model.proto"; -import "compliance/provisional_model.proto"; -import "compliance/device_software_compliance.proto"; +import 
"zigbeealliance/distributedcomplianceledger/compliance/compliance_info.proto"; +import "zigbeealliance/distributedcomplianceledger/compliance/certified_model.proto"; +import "zigbeealliance/distributedcomplianceledger/compliance/revoked_model.proto"; +import "zigbeealliance/distributedcomplianceledger/compliance/provisional_model.proto"; +import "zigbeealliance/distributedcomplianceledger/compliance/device_software_compliance.proto"; // this line is used by starport scaffolding # genesis/proto/import import "gogoproto/gogo.proto"; diff --git a/proto/compliance/provisional_model.proto b/proto/zigbeealliance/distributedcomplianceledger/compliance/provisional_model.proto similarity index 100% rename from proto/compliance/provisional_model.proto rename to proto/zigbeealliance/distributedcomplianceledger/compliance/provisional_model.proto diff --git a/proto/compliance/query.proto b/proto/zigbeealliance/distributedcomplianceledger/compliance/query.proto similarity index 92% rename from proto/compliance/query.proto rename to proto/zigbeealliance/distributedcomplianceledger/compliance/query.proto index 738520588..4516b6fc6 100644 --- a/proto/compliance/query.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/compliance/query.proto @@ -3,11 +3,11 @@ package zigbeealliance.distributedcomplianceledger.compliance; import "google/api/annotations.proto"; import "cosmos/base/query/v1beta1/pagination.proto"; -import "compliance/compliance_info.proto"; -import "compliance/certified_model.proto"; -import "compliance/revoked_model.proto"; -import "compliance/provisional_model.proto"; -import "compliance/device_software_compliance.proto"; +import "zigbeealliance/distributedcomplianceledger/compliance/compliance_info.proto"; +import "zigbeealliance/distributedcomplianceledger/compliance/certified_model.proto"; +import "zigbeealliance/distributedcomplianceledger/compliance/revoked_model.proto"; +import 
"zigbeealliance/distributedcomplianceledger/compliance/provisional_model.proto"; +import "zigbeealliance/distributedcomplianceledger/compliance/device_software_compliance.proto"; // this line is used by starport scaffolding # 1 import "gogoproto/gogo.proto"; diff --git a/proto/compliance/revoked_model.proto b/proto/zigbeealliance/distributedcomplianceledger/compliance/revoked_model.proto similarity index 100% rename from proto/compliance/revoked_model.proto rename to proto/zigbeealliance/distributedcomplianceledger/compliance/revoked_model.proto diff --git a/proto/compliance/tx.proto b/proto/zigbeealliance/distributedcomplianceledger/compliance/tx.proto similarity index 100% rename from proto/compliance/tx.proto rename to proto/zigbeealliance/distributedcomplianceledger/compliance/tx.proto diff --git a/proto/dclauth/account.proto b/proto/zigbeealliance/distributedcomplianceledger/dclauth/account.proto similarity index 85% rename from proto/dclauth/account.proto rename to proto/zigbeealliance/distributedcomplianceledger/dclauth/account.proto index 14e2e0562..f1213e518 100644 --- a/proto/dclauth/account.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/dclauth/account.proto @@ -5,8 +5,8 @@ option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/ import "gogoproto/gogo.proto"; import "cosmos/auth/v1beta1/auth.proto"; -import "common/uint16_range.proto"; -import "dclauth/grant.proto"; +import "zigbeealliance/distributedcomplianceledger/common/uint16_range.proto"; +import "zigbeealliance/distributedcomplianceledger/dclauth/grant.proto"; message Account { diff --git a/proto/dclauth/account_stat.proto b/proto/zigbeealliance/distributedcomplianceledger/dclauth/account_stat.proto similarity index 100% rename from proto/dclauth/account_stat.proto rename to proto/zigbeealliance/distributedcomplianceledger/dclauth/account_stat.proto diff --git a/proto/dclauth/genesis.proto 
b/proto/zigbeealliance/distributedcomplianceledger/dclauth/genesis.proto similarity index 63% rename from proto/dclauth/genesis.proto rename to proto/zigbeealliance/distributedcomplianceledger/dclauth/genesis.proto index f68cf4d81..88efceb25 100644 --- a/proto/dclauth/genesis.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/dclauth/genesis.proto @@ -1,12 +1,12 @@ syntax = "proto3"; package zigbeealliance.distributedcomplianceledger.dclauth; -import "dclauth/account.proto"; -import "dclauth/pending_account.proto"; -import "dclauth/pending_account_revocation.proto"; -import "dclauth/account_stat.proto"; -import "dclauth/revoked_account.proto"; -import "dclauth/rejected_account.proto"; +import "zigbeealliance/distributedcomplianceledger/dclauth/account.proto"; +import "zigbeealliance/distributedcomplianceledger/dclauth/pending_account.proto"; +import "zigbeealliance/distributedcomplianceledger/dclauth/pending_account_revocation.proto"; +import "zigbeealliance/distributedcomplianceledger/dclauth/account_stat.proto"; +import "zigbeealliance/distributedcomplianceledger/dclauth/revoked_account.proto"; +import "zigbeealliance/distributedcomplianceledger/dclauth/rejected_account.proto"; // this line is used by starport scaffolding # genesis/proto/import import "gogoproto/gogo.proto"; diff --git a/proto/dclauth/grant.proto b/proto/zigbeealliance/distributedcomplianceledger/dclauth/grant.proto similarity index 100% rename from proto/dclauth/grant.proto rename to proto/zigbeealliance/distributedcomplianceledger/dclauth/grant.proto diff --git a/proto/dclauth/pending_account.proto b/proto/zigbeealliance/distributedcomplianceledger/dclauth/pending_account.proto similarity index 85% rename from proto/dclauth/pending_account.proto rename to proto/zigbeealliance/distributedcomplianceledger/dclauth/pending_account.proto index 1d5fd1899..6817024ec 100644 --- a/proto/dclauth/pending_account.proto +++ 
b/proto/zigbeealliance/distributedcomplianceledger/dclauth/pending_account.proto @@ -4,7 +4,7 @@ package zigbeealliance.distributedcomplianceledger.dclauth; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types"; import "gogoproto/gogo.proto"; -import "dclauth/account.proto"; +import "zigbeealliance/distributedcomplianceledger/dclauth/account.proto"; message PendingAccount { // TODO issue 99: do we need that ??? diff --git a/proto/dclauth/pending_account_revocation.proto b/proto/zigbeealliance/distributedcomplianceledger/dclauth/pending_account_revocation.proto similarity index 85% rename from proto/dclauth/pending_account_revocation.proto rename to proto/zigbeealliance/distributedcomplianceledger/dclauth/pending_account_revocation.proto index 0ce68d342..0e9b29e84 100644 --- a/proto/dclauth/pending_account_revocation.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/dclauth/pending_account_revocation.proto @@ -5,7 +5,7 @@ option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/ import "gogoproto/gogo.proto"; import "cosmos_proto/cosmos.proto"; -import "dclauth/grant.proto"; +import "zigbeealliance/distributedcomplianceledger/dclauth/grant.proto"; message PendingAccountRevocation { option (gogoproto.goproto_stringer) = false; diff --git a/proto/dclauth/query.proto b/proto/zigbeealliance/distributedcomplianceledger/dclauth/query.proto similarity index 91% rename from proto/dclauth/query.proto rename to proto/zigbeealliance/distributedcomplianceledger/dclauth/query.proto index ba628b68c..fb71b08e9 100644 --- a/proto/dclauth/query.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/dclauth/query.proto @@ -3,12 +3,12 @@ package zigbeealliance.distributedcomplianceledger.dclauth; import "google/api/annotations.proto"; import "cosmos/base/query/v1beta1/pagination.proto"; -import "dclauth/account.proto"; -import "dclauth/pending_account.proto"; -import 
"dclauth/pending_account_revocation.proto"; -import "dclauth/account_stat.proto"; -import "dclauth/revoked_account.proto"; -import "dclauth/rejected_account.proto"; +import "zigbeealliance/distributedcomplianceledger/dclauth/account.proto"; +import "zigbeealliance/distributedcomplianceledger/dclauth/pending_account.proto"; +import "zigbeealliance/distributedcomplianceledger/dclauth/pending_account_revocation.proto"; +import "zigbeealliance/distributedcomplianceledger/dclauth/account_stat.proto"; +import "zigbeealliance/distributedcomplianceledger/dclauth/revoked_account.proto"; +import "zigbeealliance/distributedcomplianceledger/dclauth/rejected_account.proto"; // this line is used by starport scaffolding # 1 import "gogoproto/gogo.proto"; diff --git a/proto/dclauth/rejected_account.proto b/proto/zigbeealliance/distributedcomplianceledger/dclauth/rejected_account.proto similarity index 79% rename from proto/dclauth/rejected_account.proto rename to proto/zigbeealliance/distributedcomplianceledger/dclauth/rejected_account.proto index 277e0d8c2..2a357e3fc 100644 --- a/proto/dclauth/rejected_account.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/dclauth/rejected_account.proto @@ -4,8 +4,7 @@ package zigbeealliance.distributedcomplianceledger.dclauth; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types"; import "gogoproto/gogo.proto"; -import "dclauth/account.proto"; -import "dclauth/grant.proto"; +import "zigbeealliance/distributedcomplianceledger/dclauth/account.proto"; message RejectedAccount { Account account = 1 [(gogoproto.embed) = true]; diff --git a/proto/dclauth/revoked_account.proto b/proto/zigbeealliance/distributedcomplianceledger/dclauth/revoked_account.proto similarity index 73% rename from proto/dclauth/revoked_account.proto rename to proto/zigbeealliance/distributedcomplianceledger/dclauth/revoked_account.proto index 8ce6d5025..f1098b335 100644 --- a/proto/dclauth/revoked_account.proto +++ 
b/proto/zigbeealliance/distributedcomplianceledger/dclauth/revoked_account.proto @@ -4,8 +4,8 @@ package zigbeealliance.distributedcomplianceledger.dclauth; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types"; import "gogoproto/gogo.proto"; -import "dclauth/account.proto"; -import "dclauth/grant.proto"; +import "zigbeealliance/distributedcomplianceledger/dclauth/account.proto"; +import "zigbeealliance/distributedcomplianceledger/dclauth/grant.proto"; message RevokedAccount { Account account = 1 [(gogoproto.embed) = true]; diff --git a/proto/dclauth/tx.proto b/proto/zigbeealliance/distributedcomplianceledger/dclauth/tx.proto similarity index 97% rename from proto/dclauth/tx.proto rename to proto/zigbeealliance/distributedcomplianceledger/dclauth/tx.proto index 72c172244..c54ffd8b7 100644 --- a/proto/dclauth/tx.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/dclauth/tx.proto @@ -8,7 +8,7 @@ option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/ import "google/protobuf/any.proto"; import "gogoproto/gogo.proto"; import "cosmos_proto/cosmos.proto"; -import "common/uint16_range.proto"; +import "zigbeealliance/distributedcomplianceledger/common/uint16_range.proto"; // Msg defines the Msg service. 
service Msg { diff --git a/proto/dclgenutil/genesis.proto b/proto/zigbeealliance/distributedcomplianceledger/dclgenutil/genesis.proto similarity index 100% rename from proto/dclgenutil/genesis.proto rename to proto/zigbeealliance/distributedcomplianceledger/dclgenutil/genesis.proto diff --git a/proto/dclgenutil/query.proto b/proto/zigbeealliance/distributedcomplianceledger/dclgenutil/query.proto similarity index 100% rename from proto/dclgenutil/query.proto rename to proto/zigbeealliance/distributedcomplianceledger/dclgenutil/query.proto diff --git a/proto/dclgenutil/tx.proto b/proto/zigbeealliance/distributedcomplianceledger/dclgenutil/tx.proto similarity index 100% rename from proto/dclgenutil/tx.proto rename to proto/zigbeealliance/distributedcomplianceledger/dclgenutil/tx.proto diff --git a/proto/dclupgrade/approved_upgrade.proto b/proto/zigbeealliance/distributedcomplianceledger/dclupgrade/approved_upgrade.proto similarity index 87% rename from proto/dclupgrade/approved_upgrade.proto rename to proto/zigbeealliance/distributedcomplianceledger/dclupgrade/approved_upgrade.proto index f569b9bbb..ee17d0802 100644 --- a/proto/dclupgrade/approved_upgrade.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/dclupgrade/approved_upgrade.proto @@ -4,7 +4,7 @@ package zigbeealliance.distributedcomplianceledger.dclupgrade; import "gogoproto/gogo.proto"; import "cosmos_proto/cosmos.proto"; import "cosmos/upgrade/v1beta1/upgrade.proto"; -import "dclupgrade/grant.proto"; +import "zigbeealliance/distributedcomplianceledger/dclupgrade/grant.proto"; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclupgrade/types"; diff --git a/proto/dclupgrade/genesis.proto b/proto/zigbeealliance/distributedcomplianceledger/dclupgrade/genesis.proto similarity index 72% rename from proto/dclupgrade/genesis.proto rename to proto/zigbeealliance/distributedcomplianceledger/dclupgrade/genesis.proto index 2e4d821c3..dcf20d665 100644 --- 
a/proto/dclupgrade/genesis.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/dclupgrade/genesis.proto @@ -2,9 +2,9 @@ syntax = "proto3"; package zigbeealliance.distributedcomplianceledger.dclupgrade; import "gogoproto/gogo.proto"; -import "dclupgrade/proposed_upgrade.proto"; -import "dclupgrade/approved_upgrade.proto"; -import "dclupgrade/rejected_upgrade.proto"; +import "zigbeealliance/distributedcomplianceledger/dclupgrade/proposed_upgrade.proto"; +import "zigbeealliance/distributedcomplianceledger/dclupgrade/approved_upgrade.proto"; +import "zigbeealliance/distributedcomplianceledger/dclupgrade/rejected_upgrade.proto"; // this line is used by starport scaffolding # genesis/proto/import option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclupgrade/types"; diff --git a/proto/dclupgrade/grant.proto b/proto/zigbeealliance/distributedcomplianceledger/dclupgrade/grant.proto similarity index 100% rename from proto/dclupgrade/grant.proto rename to proto/zigbeealliance/distributedcomplianceledger/dclupgrade/grant.proto diff --git a/proto/dclupgrade/proposed_upgrade.proto b/proto/zigbeealliance/distributedcomplianceledger/dclupgrade/proposed_upgrade.proto similarity index 87% rename from proto/dclupgrade/proposed_upgrade.proto rename to proto/zigbeealliance/distributedcomplianceledger/dclupgrade/proposed_upgrade.proto index ce4a4d73a..ed8599d5d 100644 --- a/proto/dclupgrade/proposed_upgrade.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/dclupgrade/proposed_upgrade.proto @@ -4,7 +4,7 @@ package zigbeealliance.distributedcomplianceledger.dclupgrade; import "gogoproto/gogo.proto"; import "cosmos_proto/cosmos.proto"; import "cosmos/upgrade/v1beta1/upgrade.proto"; -import "dclupgrade/grant.proto"; +import "zigbeealliance/distributedcomplianceledger/dclupgrade/grant.proto"; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclupgrade/types"; diff --git a/proto/dclupgrade/query.proto 
b/proto/zigbeealliance/distributedcomplianceledger/dclupgrade/query.proto similarity index 92% rename from proto/dclupgrade/query.proto rename to proto/zigbeealliance/distributedcomplianceledger/dclupgrade/query.proto index 0ba9cce64..e3e2f7df1 100644 --- a/proto/dclupgrade/query.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/dclupgrade/query.proto @@ -4,9 +4,9 @@ package zigbeealliance.distributedcomplianceledger.dclupgrade; import "gogoproto/gogo.proto"; import "google/api/annotations.proto"; import "cosmos/base/query/v1beta1/pagination.proto"; -import "dclupgrade/proposed_upgrade.proto"; -import "dclupgrade/approved_upgrade.proto"; -import "dclupgrade/rejected_upgrade.proto"; +import "zigbeealliance/distributedcomplianceledger/dclupgrade/proposed_upgrade.proto"; +import "zigbeealliance/distributedcomplianceledger/dclupgrade/approved_upgrade.proto"; +import "zigbeealliance/distributedcomplianceledger/dclupgrade/rejected_upgrade.proto"; // this line is used by starport scaffolding # 1 option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclupgrade/types"; diff --git a/proto/dclupgrade/rejected_upgrade.proto b/proto/zigbeealliance/distributedcomplianceledger/dclupgrade/rejected_upgrade.proto similarity index 87% rename from proto/dclupgrade/rejected_upgrade.proto rename to proto/zigbeealliance/distributedcomplianceledger/dclupgrade/rejected_upgrade.proto index 2a166e6a1..8e5977762 100644 --- a/proto/dclupgrade/rejected_upgrade.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/dclupgrade/rejected_upgrade.proto @@ -4,7 +4,7 @@ package zigbeealliance.distributedcomplianceledger.dclupgrade; import "gogoproto/gogo.proto"; import "cosmos_proto/cosmos.proto"; import "cosmos/upgrade/v1beta1/upgrade.proto"; -import "dclupgrade/grant.proto"; +import "zigbeealliance/distributedcomplianceledger/dclupgrade/grant.proto"; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclupgrade/types"; diff 
--git a/proto/dclupgrade/tx.proto b/proto/zigbeealliance/distributedcomplianceledger/dclupgrade/tx.proto similarity index 100% rename from proto/dclupgrade/tx.proto rename to proto/zigbeealliance/distributedcomplianceledger/dclupgrade/tx.proto diff --git a/proto/model/genesis.proto b/proto/zigbeealliance/distributedcomplianceledger/model/genesis.proto similarity index 70% rename from proto/model/genesis.proto rename to proto/zigbeealliance/distributedcomplianceledger/model/genesis.proto index c515742f0..e414e995d 100644 --- a/proto/model/genesis.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/model/genesis.proto @@ -1,10 +1,10 @@ syntax = "proto3"; package zigbeealliance.distributedcomplianceledger.model; -import "model/vendor_products.proto"; -import "model/model.proto"; -import "model/model_version.proto"; -import "model/model_versions.proto"; +import "zigbeealliance/distributedcomplianceledger/model/vendor_products.proto"; +import "zigbeealliance/distributedcomplianceledger/model/model.proto"; +import "zigbeealliance/distributedcomplianceledger/model/model_version.proto"; +import "zigbeealliance/distributedcomplianceledger/model/model_versions.proto"; // this line is used by starport scaffolding # genesis/proto/import import "gogoproto/gogo.proto"; diff --git a/proto/model/model.proto b/proto/zigbeealliance/distributedcomplianceledger/model/model.proto similarity index 100% rename from proto/model/model.proto rename to proto/zigbeealliance/distributedcomplianceledger/model/model.proto diff --git a/proto/model/model_version.proto b/proto/zigbeealliance/distributedcomplianceledger/model/model_version.proto similarity index 100% rename from proto/model/model_version.proto rename to proto/zigbeealliance/distributedcomplianceledger/model/model_version.proto diff --git a/proto/model/model_versions.proto b/proto/zigbeealliance/distributedcomplianceledger/model/model_versions.proto similarity index 100% rename from proto/model/model_versions.proto rename to 
proto/zigbeealliance/distributedcomplianceledger/model/model_versions.proto diff --git a/proto/model/product.proto b/proto/zigbeealliance/distributedcomplianceledger/model/product.proto similarity index 100% rename from proto/model/product.proto rename to proto/zigbeealliance/distributedcomplianceledger/model/product.proto diff --git a/proto/model/query.proto b/proto/zigbeealliance/distributedcomplianceledger/model/query.proto similarity index 88% rename from proto/model/query.proto rename to proto/zigbeealliance/distributedcomplianceledger/model/query.proto index 7c3a30b5f..be28cfc58 100644 --- a/proto/model/query.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/model/query.proto @@ -3,10 +3,10 @@ package zigbeealliance.distributedcomplianceledger.model; import "google/api/annotations.proto"; import "cosmos/base/query/v1beta1/pagination.proto"; -import "model/vendor_products.proto"; -import "model/model.proto"; -import "model/model_version.proto"; -import "model/model_versions.proto"; +import "zigbeealliance/distributedcomplianceledger/model/vendor_products.proto"; +import "zigbeealliance/distributedcomplianceledger/model/model.proto"; +import "zigbeealliance/distributedcomplianceledger/model/model_version.proto"; +import "zigbeealliance/distributedcomplianceledger/model/model_versions.proto"; // this line is used by starport scaffolding # 1 import "gogoproto/gogo.proto"; diff --git a/proto/model/tx.proto b/proto/zigbeealliance/distributedcomplianceledger/model/tx.proto similarity index 98% rename from proto/model/tx.proto rename to proto/zigbeealliance/distributedcomplianceledger/model/tx.proto index 8f1251cbb..09396cae6 100644 --- a/proto/model/tx.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/model/tx.proto @@ -1,8 +1,8 @@ syntax = "proto3"; package zigbeealliance.distributedcomplianceledger.model; -import "model/model.proto"; -import "model/model_version.proto"; +import "zigbeealliance/distributedcomplianceledger/model/model.proto"; 
+import "zigbeealliance/distributedcomplianceledger/model/model_version.proto"; // this line is used by starport scaffolding # proto/tx/import import "gogoproto/gogo.proto"; diff --git a/proto/model/vendor_products.proto b/proto/zigbeealliance/distributedcomplianceledger/model/vendor_products.proto similarity index 77% rename from proto/model/vendor_products.proto rename to proto/zigbeealliance/distributedcomplianceledger/model/vendor_products.proto index 5dd01a95a..a1dc3dc4b 100644 --- a/proto/model/vendor_products.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/model/vendor_products.proto @@ -2,7 +2,7 @@ syntax = "proto3"; package zigbeealliance.distributedcomplianceledger.model; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/model/types"; -import "model/product.proto"; +import "zigbeealliance/distributedcomplianceledger/model/product.proto"; message VendorProducts { int32 vid = 1; diff --git a/proto/pki/approved_certificates.proto b/proto/zigbeealliance/distributedcomplianceledger/pki/approved_certificates.proto similarity index 82% rename from proto/pki/approved_certificates.proto rename to proto/zigbeealliance/distributedcomplianceledger/pki/approved_certificates.proto index 65010d9f4..d3ee5c5cb 100644 --- a/proto/pki/approved_certificates.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/pki/approved_certificates.proto @@ -3,7 +3,7 @@ package zigbeealliance.distributedcomplianceledger.pki; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/types"; -import "pki/certificate.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/certificate.proto"; import "gogoproto/gogo.proto"; message ApprovedCertificates { diff --git a/proto/pki/approved_certificates_by_subject.proto b/proto/zigbeealliance/distributedcomplianceledger/pki/approved_certificates_by_subject.proto similarity index 100% rename from proto/pki/approved_certificates_by_subject.proto rename to 
proto/zigbeealliance/distributedcomplianceledger/pki/approved_certificates_by_subject.proto diff --git a/proto/pki/approved_certificates_by_subject_key_id.proto b/proto/zigbeealliance/distributedcomplianceledger/pki/approved_certificates_by_subject_key_id.proto similarity index 78% rename from proto/pki/approved_certificates_by_subject_key_id.proto rename to proto/zigbeealliance/distributedcomplianceledger/pki/approved_certificates_by_subject_key_id.proto index 82486062d..18348157d 100644 --- a/proto/pki/approved_certificates_by_subject_key_id.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/pki/approved_certificates_by_subject_key_id.proto @@ -3,7 +3,7 @@ package zigbeealliance.distributedcomplianceledger.pki; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/types"; -import "pki/certificate.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/certificate.proto"; message ApprovedCertificatesBySubjectKeyId { string subjectKeyId = 1; diff --git a/proto/pki/approved_root_certificates.proto b/proto/zigbeealliance/distributedcomplianceledger/pki/approved_root_certificates.proto similarity index 74% rename from proto/pki/approved_root_certificates.proto rename to proto/zigbeealliance/distributedcomplianceledger/pki/approved_root_certificates.proto index d211918e6..c2edf4aab 100644 --- a/proto/pki/approved_root_certificates.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/pki/approved_root_certificates.proto @@ -3,7 +3,7 @@ package zigbeealliance.distributedcomplianceledger.pki; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/types"; -import "pki/certificate_identifier.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/certificate_identifier.proto"; message ApprovedRootCertificates { repeated CertificateIdentifier certs = 1; diff --git a/proto/pki/certificate.proto b/proto/zigbeealliance/distributedcomplianceledger/pki/certificate.proto similarity index 
91% rename from proto/pki/certificate.proto rename to proto/zigbeealliance/distributedcomplianceledger/pki/certificate.proto index a021e7619..44416f8cc 100644 --- a/proto/pki/certificate.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/pki/certificate.proto @@ -6,7 +6,7 @@ option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/ import "cosmos_proto/cosmos.proto"; import "gogoproto/gogo.proto"; -import "pki/grant.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/grant.proto"; message Certificate { diff --git a/proto/pki/certificate_identifier.proto b/proto/zigbeealliance/distributedcomplianceledger/pki/certificate_identifier.proto similarity index 100% rename from proto/pki/certificate_identifier.proto rename to proto/zigbeealliance/distributedcomplianceledger/pki/certificate_identifier.proto diff --git a/proto/pki/child_certificates.proto b/proto/zigbeealliance/distributedcomplianceledger/pki/child_certificates.proto similarity index 77% rename from proto/pki/child_certificates.proto rename to proto/zigbeealliance/distributedcomplianceledger/pki/child_certificates.proto index 5b0967fdf..6610e53a0 100644 --- a/proto/pki/child_certificates.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/pki/child_certificates.proto @@ -3,7 +3,7 @@ package zigbeealliance.distributedcomplianceledger.pki; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/types"; -import "pki/certificate_identifier.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/certificate_identifier.proto"; message ChildCertificates { string issuer = 1; diff --git a/proto/pki/genesis.proto b/proto/zigbeealliance/distributedcomplianceledger/pki/genesis.proto similarity index 57% rename from proto/pki/genesis.proto rename to proto/zigbeealliance/distributedcomplianceledger/pki/genesis.proto index ffc2bd523..7c9cc2f70 100644 --- a/proto/pki/genesis.proto +++ 
b/proto/zigbeealliance/distributedcomplianceledger/pki/genesis.proto @@ -1,23 +1,23 @@ syntax = "proto3"; package zigbeealliance.distributedcomplianceledger.pki; -import "pki/approved_certificates.proto"; -import "pki/proposed_certificate.proto"; -import "pki/child_certificates.proto"; -import "pki/proposed_certificate_revocation.proto"; -import "pki/revoked_certificates.proto"; -import "pki/unique_certificate.proto"; -import "pki/approved_root_certificates.proto"; -import "pki/revoked_root_certificates.proto"; -import "pki/approved_certificates_by_subject.proto"; -import "pki/rejected_certificate.proto"; -import "pki/pki_revocation_distribution_point.proto"; -import "pki/pki_revocation_distribution_points_by_issuer_subject_key_id.proto"; -import "pki/approved_certificates_by_subject_key_id.proto"; -import "pki/noc_root_certificates.proto"; -import "pki/noc_ica_certificates.proto"; -import "pki/revoked_noc_root_certificates.proto"; -import "pki/noc_root_certificates_by_vid_and_skid.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/approved_certificates.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/proposed_certificate.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/child_certificates.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/proposed_certificate_revocation.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/revoked_certificates.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/unique_certificate.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/approved_root_certificates.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/revoked_root_certificates.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/approved_certificates_by_subject.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/rejected_certificate.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/pki_revocation_distribution_point.proto"; +import 
"zigbeealliance/distributedcomplianceledger/pki/pki_revocation_distribution_points_by_issuer_subject_key_id.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/approved_certificates_by_subject_key_id.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/noc_root_certificates.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/noc_ica_certificates.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/revoked_noc_root_certificates.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/noc_root_certificates_by_vid_and_skid.proto"; // this line is used by starport scaffolding # genesis/proto/import import "gogoproto/gogo.proto"; diff --git a/proto/pki/grant.proto b/proto/zigbeealliance/distributedcomplianceledger/pki/grant.proto similarity index 100% rename from proto/pki/grant.proto rename to proto/zigbeealliance/distributedcomplianceledger/pki/grant.proto diff --git a/proto/pki/noc_ica_certificates.proto b/proto/zigbeealliance/distributedcomplianceledger/pki/noc_ica_certificates.proto similarity index 81% rename from proto/pki/noc_ica_certificates.proto rename to proto/zigbeealliance/distributedcomplianceledger/pki/noc_ica_certificates.proto index 5e94665e7..88833a743 100644 --- a/proto/pki/noc_ica_certificates.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/pki/noc_ica_certificates.proto @@ -4,7 +4,7 @@ package zigbeealliance.distributedcomplianceledger.pki; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/types"; import "gogoproto/gogo.proto"; -import "pki/certificate.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/certificate.proto"; message NocIcaCertificates { int32 vid = 1 [(gogoproto.moretags) = "validate:\"gte=1,lte=65535\""]; diff --git a/proto/pki/noc_root_certificates.proto b/proto/zigbeealliance/distributedcomplianceledger/pki/noc_root_certificates.proto similarity index 82% rename from proto/pki/noc_root_certificates.proto rename to 
proto/zigbeealliance/distributedcomplianceledger/pki/noc_root_certificates.proto index 60a10d13b..488ce64f6 100644 --- a/proto/pki/noc_root_certificates.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/pki/noc_root_certificates.proto @@ -4,7 +4,7 @@ package zigbeealliance.distributedcomplianceledger.pki; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/types"; import "gogoproto/gogo.proto"; -import "pki/certificate.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/certificate.proto"; message NocRootCertificates { int32 vid = 1 [(gogoproto.moretags) = "validate:\"gte=1,lte=65535\""]; diff --git a/proto/pki/noc_root_certificates_by_vid_and_skid.proto b/proto/zigbeealliance/distributedcomplianceledger/pki/noc_root_certificates_by_vid_and_skid.proto similarity index 84% rename from proto/pki/noc_root_certificates_by_vid_and_skid.proto rename to proto/zigbeealliance/distributedcomplianceledger/pki/noc_root_certificates_by_vid_and_skid.proto index 0a248e851..1b0b1fa02 100644 --- a/proto/pki/noc_root_certificates_by_vid_and_skid.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/pki/noc_root_certificates_by_vid_and_skid.proto @@ -4,7 +4,7 @@ package zigbeealliance.distributedcomplianceledger.pki; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/types"; import "gogoproto/gogo.proto"; -import "pki/certificate.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/certificate.proto"; message NocRootCertificatesByVidAndSkid { int32 vid = 1 [(gogoproto.moretags) = "validate:\"gte=1,lte=65535\""]; diff --git a/proto/pki/pki_revocation_distribution_point.proto b/proto/zigbeealliance/distributedcomplianceledger/pki/pki_revocation_distribution_point.proto similarity index 100% rename from proto/pki/pki_revocation_distribution_point.proto rename to proto/zigbeealliance/distributedcomplianceledger/pki/pki_revocation_distribution_point.proto diff --git 
a/proto/pki/pki_revocation_distribution_points_by_issuer_subject_key_id.proto b/proto/zigbeealliance/distributedcomplianceledger/pki/pki_revocation_distribution_points_by_issuer_subject_key_id.proto similarity index 76% rename from proto/pki/pki_revocation_distribution_points_by_issuer_subject_key_id.proto rename to proto/zigbeealliance/distributedcomplianceledger/pki/pki_revocation_distribution_points_by_issuer_subject_key_id.proto index eacb70355..c42657167 100644 --- a/proto/pki/pki_revocation_distribution_points_by_issuer_subject_key_id.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/pki/pki_revocation_distribution_points_by_issuer_subject_key_id.proto @@ -3,7 +3,7 @@ package zigbeealliance.distributedcomplianceledger.pki; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/types"; -import "pki/pki_revocation_distribution_point.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/pki_revocation_distribution_point.proto"; message PkiRevocationDistributionPointsByIssuerSubjectKeyID { string issuerSubjectKeyID = 1; diff --git a/proto/pki/proposed_certificate.proto b/proto/zigbeealliance/distributedcomplianceledger/pki/proposed_certificate.proto similarity index 90% rename from proto/pki/proposed_certificate.proto rename to proto/zigbeealliance/distributedcomplianceledger/pki/proposed_certificate.proto index 1457535bf..2c6739fba 100644 --- a/proto/pki/proposed_certificate.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/pki/proposed_certificate.proto @@ -5,7 +5,7 @@ option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/ import "cosmos_proto/cosmos.proto"; import "gogoproto/gogo.proto"; -import "pki/grant.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/grant.proto"; message ProposedCertificate { string subject = 1; diff --git a/proto/pki/proposed_certificate_revocation.proto 
b/proto/zigbeealliance/distributedcomplianceledger/pki/proposed_certificate_revocation.proto similarity index 85% rename from proto/pki/proposed_certificate_revocation.proto rename to proto/zigbeealliance/distributedcomplianceledger/pki/proposed_certificate_revocation.proto index d4242893f..f4a90c679 100644 --- a/proto/pki/proposed_certificate_revocation.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/pki/proposed_certificate_revocation.proto @@ -3,7 +3,7 @@ package zigbeealliance.distributedcomplianceledger.pki; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/types"; -import "pki/grant.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/grant.proto"; message ProposedCertificateRevocation { string subject = 1; diff --git a/proto/pki/query.proto b/proto/zigbeealliance/distributedcomplianceledger/pki/query.proto similarity index 90% rename from proto/pki/query.proto rename to proto/zigbeealliance/distributedcomplianceledger/pki/query.proto index 87e512a54..638d20794 100644 --- a/proto/pki/query.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/pki/query.proto @@ -3,21 +3,21 @@ package zigbeealliance.distributedcomplianceledger.pki; import "google/api/annotations.proto"; import "cosmos/base/query/v1beta1/pagination.proto"; -import "pki/approved_certificates.proto"; -import "pki/proposed_certificate.proto"; -import "pki/child_certificates.proto"; -import "pki/proposed_certificate_revocation.proto"; -import "pki/revoked_certificates.proto"; -import "pki/approved_root_certificates.proto"; -import "pki/revoked_root_certificates.proto"; -import "pki/approved_certificates_by_subject.proto"; -import "pki/rejected_certificate.proto"; -import "pki/pki_revocation_distribution_point.proto"; -import "pki/pki_revocation_distribution_points_by_issuer_subject_key_id.proto"; -import "pki/noc_root_certificates.proto"; -import "pki/noc_ica_certificates.proto"; -import "pki/revoked_noc_root_certificates.proto"; 
-import "pki/noc_root_certificates_by_vid_and_skid.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/approved_certificates.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/proposed_certificate.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/child_certificates.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/proposed_certificate_revocation.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/revoked_certificates.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/approved_root_certificates.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/revoked_root_certificates.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/approved_certificates_by_subject.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/rejected_certificate.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/pki_revocation_distribution_point.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/pki_revocation_distribution_points_by_issuer_subject_key_id.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/noc_root_certificates.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/noc_ica_certificates.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/revoked_noc_root_certificates.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/noc_root_certificates_by_vid_and_skid.proto"; // this line is used by starport scaffolding # 1 import "gogoproto/gogo.proto"; diff --git a/proto/pki/rejected_certificate.proto b/proto/zigbeealliance/distributedcomplianceledger/pki/rejected_certificate.proto similarity index 80% rename from proto/pki/rejected_certificate.proto rename to proto/zigbeealliance/distributedcomplianceledger/pki/rejected_certificate.proto index cd57bd479..c58627558 100644 --- a/proto/pki/rejected_certificate.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/pki/rejected_certificate.proto @@ -3,7 +3,7 @@ 
package zigbeealliance.distributedcomplianceledger.pki; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/types"; -import "pki/certificate.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/certificate.proto"; message RejectedCertificate { string subject = 1; diff --git a/proto/pki/revoked_certificates.proto b/proto/zigbeealliance/distributedcomplianceledger/pki/revoked_certificates.proto similarity index 80% rename from proto/pki/revoked_certificates.proto rename to proto/zigbeealliance/distributedcomplianceledger/pki/revoked_certificates.proto index 016e64a4e..9434df21a 100644 --- a/proto/pki/revoked_certificates.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/pki/revoked_certificates.proto @@ -3,7 +3,7 @@ package zigbeealliance.distributedcomplianceledger.pki; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/types"; -import "pki/certificate.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/certificate.proto"; message RevokedCertificates { string subject = 1; diff --git a/proto/pki/revoked_noc_root_certificates.proto b/proto/zigbeealliance/distributedcomplianceledger/pki/revoked_noc_root_certificates.proto similarity index 79% rename from proto/pki/revoked_noc_root_certificates.proto rename to proto/zigbeealliance/distributedcomplianceledger/pki/revoked_noc_root_certificates.proto index 66ff441f4..c5f34aef5 100644 --- a/proto/pki/revoked_noc_root_certificates.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/pki/revoked_noc_root_certificates.proto @@ -3,7 +3,7 @@ package zigbeealliance.distributedcomplianceledger.pki; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/types"; -import "pki/certificate.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/certificate.proto"; message RevokedNocRootCertificates { string subject = 1; diff --git a/proto/pki/revoked_root_certificates.proto 
b/proto/zigbeealliance/distributedcomplianceledger/pki/revoked_root_certificates.proto similarity index 74% rename from proto/pki/revoked_root_certificates.proto rename to proto/zigbeealliance/distributedcomplianceledger/pki/revoked_root_certificates.proto index 37af6b296..9ee282896 100644 --- a/proto/pki/revoked_root_certificates.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/pki/revoked_root_certificates.proto @@ -3,7 +3,7 @@ package zigbeealliance.distributedcomplianceledger.pki; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/types"; -import "pki/certificate_identifier.proto"; +import "zigbeealliance/distributedcomplianceledger/pki/certificate_identifier.proto"; message RevokedRootCertificates { repeated CertificateIdentifier certs = 1; diff --git a/proto/pki/tx.proto b/proto/zigbeealliance/distributedcomplianceledger/pki/tx.proto similarity index 100% rename from proto/pki/tx.proto rename to proto/zigbeealliance/distributedcomplianceledger/pki/tx.proto diff --git a/proto/pki/unique_certificate.proto b/proto/zigbeealliance/distributedcomplianceledger/pki/unique_certificate.proto similarity index 100% rename from proto/pki/unique_certificate.proto rename to proto/zigbeealliance/distributedcomplianceledger/pki/unique_certificate.proto diff --git a/proto/validator/description.proto b/proto/zigbeealliance/distributedcomplianceledger/validator/description.proto similarity index 100% rename from proto/validator/description.proto rename to proto/zigbeealliance/distributedcomplianceledger/validator/description.proto diff --git a/proto/validator/disabled_validator.proto b/proto/zigbeealliance/distributedcomplianceledger/validator/disabled_validator.proto similarity index 86% rename from proto/validator/disabled_validator.proto rename to proto/zigbeealliance/distributedcomplianceledger/validator/disabled_validator.proto index a26faeb86..550a18a39 100644 --- a/proto/validator/disabled_validator.proto +++ 
b/proto/zigbeealliance/distributedcomplianceledger/validator/disabled_validator.proto @@ -2,7 +2,7 @@ syntax = "proto3"; package zigbeealliance.distributedcomplianceledger.validator; import "cosmos_proto/cosmos.proto"; -import "validator/grant.proto"; +import "zigbeealliance/distributedcomplianceledger/validator/grant.proto"; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/types"; diff --git a/proto/validator/genesis.proto b/proto/zigbeealliance/distributedcomplianceledger/validator/genesis.proto similarity index 66% rename from proto/validator/genesis.proto rename to proto/zigbeealliance/distributedcomplianceledger/validator/genesis.proto index 81310e6f2..460c6de27 100644 --- a/proto/validator/genesis.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/validator/genesis.proto @@ -1,11 +1,11 @@ syntax = "proto3"; package zigbeealliance.distributedcomplianceledger.validator; -import "validator/validator.proto"; -import "validator/last_validator_power.proto"; -import "validator/proposed_disable_validator.proto"; -import "validator/disabled_validator.proto"; -import "validator/rejected_validator.proto"; +import "zigbeealliance/distributedcomplianceledger/validator/validator.proto"; +import "zigbeealliance/distributedcomplianceledger/validator/last_validator_power.proto"; +import "zigbeealliance/distributedcomplianceledger/validator/proposed_disable_validator.proto"; +import "zigbeealliance/distributedcomplianceledger/validator/disabled_validator.proto"; +import "zigbeealliance/distributedcomplianceledger/validator/rejected_validator.proto"; // this line is used by starport scaffolding # genesis/proto/import import "gogoproto/gogo.proto"; diff --git a/proto/validator/grant.proto b/proto/zigbeealliance/distributedcomplianceledger/validator/grant.proto similarity index 100% rename from proto/validator/grant.proto rename to proto/zigbeealliance/distributedcomplianceledger/validator/grant.proto diff --git 
a/proto/validator/last_validator_power.proto b/proto/zigbeealliance/distributedcomplianceledger/validator/last_validator_power.proto similarity index 100% rename from proto/validator/last_validator_power.proto rename to proto/zigbeealliance/distributedcomplianceledger/validator/last_validator_power.proto diff --git a/proto/validator/proposed_disable_validator.proto b/proto/zigbeealliance/distributedcomplianceledger/validator/proposed_disable_validator.proto similarity index 86% rename from proto/validator/proposed_disable_validator.proto rename to proto/zigbeealliance/distributedcomplianceledger/validator/proposed_disable_validator.proto index ac2e56280..63e5f7a93 100644 --- a/proto/validator/proposed_disable_validator.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/validator/proposed_disable_validator.proto @@ -2,7 +2,7 @@ syntax = "proto3"; package zigbeealliance.distributedcomplianceledger.validator; import "cosmos_proto/cosmos.proto"; -import "validator/grant.proto"; +import "zigbeealliance/distributedcomplianceledger/validator/grant.proto"; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/types"; diff --git a/proto/validator/query.proto b/proto/zigbeealliance/distributedcomplianceledger/validator/query.proto similarity index 92% rename from proto/validator/query.proto rename to proto/zigbeealliance/distributedcomplianceledger/validator/query.proto index c46687c31..295c475be 100644 --- a/proto/validator/query.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/validator/query.proto @@ -3,11 +3,11 @@ package zigbeealliance.distributedcomplianceledger.validator; import "google/api/annotations.proto"; import "cosmos/base/query/v1beta1/pagination.proto"; -import "validator/validator.proto"; -import "validator/last_validator_power.proto"; -import "validator/proposed_disable_validator.proto"; -import "validator/disabled_validator.proto"; -import "validator/rejected_validator.proto"; +import 
"zigbeealliance/distributedcomplianceledger/validator/validator.proto"; +import "zigbeealliance/distributedcomplianceledger/validator/last_validator_power.proto"; +import "zigbeealliance/distributedcomplianceledger/validator/proposed_disable_validator.proto"; +import "zigbeealliance/distributedcomplianceledger/validator/disabled_validator.proto"; +import "zigbeealliance/distributedcomplianceledger/validator/rejected_validator.proto"; // this line is used by starport scaffolding # 1 import "gogoproto/gogo.proto"; diff --git a/proto/validator/rejected_validator.proto b/proto/zigbeealliance/distributedcomplianceledger/validator/rejected_validator.proto similarity index 86% rename from proto/validator/rejected_validator.proto rename to proto/zigbeealliance/distributedcomplianceledger/validator/rejected_validator.proto index 9046b84c2..9c596bfa8 100644 --- a/proto/validator/rejected_validator.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/validator/rejected_validator.proto @@ -2,7 +2,7 @@ syntax = "proto3"; package zigbeealliance.distributedcomplianceledger.validator; import "cosmos_proto/cosmos.proto"; -import "validator/grant.proto"; +import "zigbeealliance/distributedcomplianceledger/validator/grant.proto"; option go_package = "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/types"; diff --git a/proto/validator/tx.proto b/proto/zigbeealliance/distributedcomplianceledger/validator/tx.proto similarity index 97% rename from proto/validator/tx.proto rename to proto/zigbeealliance/distributedcomplianceledger/validator/tx.proto index d8c799d1f..f6e932253 100644 --- a/proto/validator/tx.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/validator/tx.proto @@ -2,7 +2,7 @@ syntax = "proto3"; package zigbeealliance.distributedcomplianceledger.validator; // this line is used by starport scaffolding # proto/tx/import -import "validator/description.proto"; +import 
"zigbeealliance/distributedcomplianceledger/validator/description.proto"; import "google/protobuf/any.proto"; import "gogoproto/gogo.proto"; diff --git a/proto/validator/validator.proto b/proto/zigbeealliance/distributedcomplianceledger/validator/validator.proto similarity index 92% rename from proto/validator/validator.proto rename to proto/zigbeealliance/distributedcomplianceledger/validator/validator.proto index d9d524a6f..521ae9751 100644 --- a/proto/validator/validator.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/validator/validator.proto @@ -7,7 +7,7 @@ import "google/protobuf/any.proto"; import "gogoproto/gogo.proto"; import "cosmos_proto/cosmos.proto"; -import "validator/description.proto"; +import "zigbeealliance/distributedcomplianceledger/validator/description.proto"; message Validator { option (gogoproto.equal) = false; diff --git a/proto/vendorinfo/genesis.proto b/proto/zigbeealliance/distributedcomplianceledger/vendorinfo/genesis.proto similarity index 86% rename from proto/vendorinfo/genesis.proto rename to proto/zigbeealliance/distributedcomplianceledger/vendorinfo/genesis.proto index 2ac32008f..4aedab36a 100644 --- a/proto/vendorinfo/genesis.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/vendorinfo/genesis.proto @@ -1,7 +1,7 @@ syntax = "proto3"; package zigbeealliance.distributedcomplianceledger.vendorinfo; -import "vendorinfo/vendor_info.proto"; +import "zigbeealliance/distributedcomplianceledger/vendorinfo/vendor_info.proto"; // this line is used by starport scaffolding # genesis/proto/import import "gogoproto/gogo.proto"; diff --git a/proto/vendorinfo/query.proto b/proto/zigbeealliance/distributedcomplianceledger/vendorinfo/query.proto similarity index 94% rename from proto/vendorinfo/query.proto rename to proto/zigbeealliance/distributedcomplianceledger/vendorinfo/query.proto index f6fc5e55e..bd5df0509 100644 --- a/proto/vendorinfo/query.proto +++ 
b/proto/zigbeealliance/distributedcomplianceledger/vendorinfo/query.proto @@ -3,7 +3,7 @@ package zigbeealliance.distributedcomplianceledger.vendorinfo; import "google/api/annotations.proto"; import "cosmos/base/query/v1beta1/pagination.proto"; -import "vendorinfo/vendor_info.proto"; +import "zigbeealliance/distributedcomplianceledger/vendorinfo/vendor_info.proto"; // this line is used by starport scaffolding # 1 import "gogoproto/gogo.proto"; diff --git a/proto/vendorinfo/tx.proto b/proto/zigbeealliance/distributedcomplianceledger/vendorinfo/tx.proto similarity index 96% rename from proto/vendorinfo/tx.proto rename to proto/zigbeealliance/distributedcomplianceledger/vendorinfo/tx.proto index 9f8a0394d..8893134be 100644 --- a/proto/vendorinfo/tx.proto +++ b/proto/zigbeealliance/distributedcomplianceledger/vendorinfo/tx.proto @@ -1,7 +1,7 @@ syntax = "proto3"; package zigbeealliance.distributedcomplianceledger.vendorinfo; -import "vendorinfo/vendor_info.proto"; +import "zigbeealliance/distributedcomplianceledger/vendorinfo/vendor_info.proto"; import "gogoproto/gogo.proto"; import "cosmos_proto/cosmos.proto"; // this line is used by starport scaffolding # proto/tx/import diff --git a/proto/vendorinfo/vendor_info.proto b/proto/zigbeealliance/distributedcomplianceledger/vendorinfo/vendor_info.proto similarity index 100% rename from proto/vendorinfo/vendor_info.proto rename to proto/zigbeealliance/distributedcomplianceledger/vendorinfo/vendor_info.proto diff --git a/scripts/Dockerfile b/scripts/Dockerfile index 27c19d7c0..6c6f44369 100644 --- a/scripts/Dockerfile +++ b/scripts/Dockerfile @@ -1,61 +1,34 @@ ################################################################################ -# Image of a machine with installed swagger-combine and starport. +# Image of a machine with installed swagger-combine, buf and ignite. 
################################################################################ FROM node # Install golang ARG GO_VERSION -ENV GO_VERSION=${GO_VERSION:-1.18} +ENV GO_VERSION=${GO_VERSION:-1.20} ENV BASH_ENV=/etc/bashrc ENV PATH="${PATH}:/usr/local/go/bin" RUN curl -L https://golang.org/dl/go${GO_VERSION}.linux-amd64.tar.gz -o /tmp/go.tar.gz \ && rm -rf /usr/local/go \ - && tar -C /usr/local -xzf /tmp/go.tar.gz \ + && tar -C /usr/local -xzf /tmp/go.tar.gz \ && go version \ && rm -f /tmp/go.tar.gz -# Install protoc -ARG PROTOC_VERSION -ENV PROTOC_VERSION=${PROTOC_VERSION:-3.19.4} -RUN PROTOC_ZIP=protoc-${PROTOC_VERSION}-linux-x86_64.zip \ - && curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v${PROTOC_VERSION}/${PROTOC_ZIP} \ - && unzip -o $PROTOC_ZIP -d /usr/local bin/protoc \ - && unzip -o $PROTOC_ZIP -d /usr/local 'include/*' \ - && rm -f $PROTOC_ZIP - -# Install grpc-gateway tools -ENV PROTOC_GEN_GRPC_GATEWAY_VERSION=v2.8.0 -ENV PROTOC_GEN_OPENAPIV2_VERSION=v2.8.0 -ENV PROTOC_GEN_SWAGGER_VERSION=v1.16.0 - -RUN go install \ - github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-grpc-gateway@${PROTOC_GEN_GRPC_GATEWAY_VERSION} \ - github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2@${PROTOC_GEN_OPENAPIV2_VERSION} - -RUN go install \ - github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger@${PROTOC_GEN_SWAGGER_VERSION} - +# Install swagger-combine RUN npm install -g swagger-combine - -# Install starport -ARG STARPORT_VERSION -ENV STARPORT_VERSION=${STARPORT_VERSION:-0.19.5} -#ENV STARPORT_VERSION=${STARPORT_VERSION:-dev} - -RUN if [ "$STARPORT_VERSION" = "dev" ]; then \ - curl -L https://github.com/tendermint/starport/archive/refs/heads/develop.zip -o /tmp/starport.zip && \ - cd /tmp \ - && unzip starport.zip \ - && cd starport-develop \ - && make build \ - && cp ./dist/starport /usr/local/bin; \ - else \ - curl https://get.starport.network/starport@v${STARPORT_VERSION}! 
-o /tmp/startport \ - && bash /tmp/startport \ - && rm /tmp/startport; \ - fi +# Install buf v1.29.0 +WORKDIR /tmp +RUN curl -L https://github.com/bufbuild/buf/releases/download/v1.29.0/buf-Linux-x86_64 -o buf && \ + chmod 777 buf && \ + cp buf /usr/local/bin +RUN go install github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2@v2.19.0 + +# Install ignite v0.27.2 +RUN curl -L https://github.com/ignite/cli/releases/download/v0.27.2/ignite_0.27.2_linux_amd64.tar.gz -o ignite.tar.gz && \ + tar -xvzf ignite.tar.gz && \ + cp ignite /usr/local/bin ENV PATH="/root/go/bin:${PATH}" ENV PATH="${PATH}:/usr/local/bin" diff --git a/scripts/README.md b/scripts/README.md index 6291301f8..953979b18 100644 --- a/scripts/README.md +++ b/scripts/README.md @@ -6,8 +6,7 @@ The [Dockerfile](./Dockerfile) might be handy for the following cases: -- when you want to use not yet release (dev) version of `starport` -- when you don't want to setup starport or swagger dependencies locally by some reason +- when you don't want to setup Ignite or Swagger dependencies locally for some reason `[Note]` If you want to install dependencies locally, try to use versions specified in [Dockerfile](./Dockerfile) to avoid errors while running scripts @@ -25,10 +24,15 @@ docker run -it -v "$PWD":/dcl /bin/bash ## Scripts -Build proto (for example `starport chain build`).
```bash -starport chain build +./scripts/protocgen.sh +``` +or + +```bash +ignite chain build ``` Generate Cosmos base openapi diff --git a/scripts/cosmos-swagger-gen.sh b/scripts/cosmos-swagger-gen.sh index e801c90a8..9f7ff3532 100755 --- a/scripts/cosmos-swagger-gen.sh +++ b/scripts/cosmos-swagger-gen.sh @@ -6,7 +6,7 @@ echo "Generating Cosmos openapi for '$TYPE'" CONFIG_FILE="$PWD/scripts/swagger/config/cosmos-$TYPE-config.json" OUTPUT_FILE="$PWD/docs/static/cosmos-$TYPE-openapi.json" -COSMOS_SDK_VERSION="v0.44.4" +COSMOS_SDK_VERSION="v0.47.8" rm -rf ./tmp-swagger-gen mkdir -p ./tmp-swagger-gen @@ -14,9 +14,22 @@ mkdir -p ./tmp-swagger-gen # clone cosmos-sdk repo git clone -b $COSMOS_SDK_VERSION --depth 1 https://github.com/cosmos/cosmos-sdk.git ./tmp-swagger-gen/cosmos-sdk -# generate openapi -pushd ./tmp-swagger-gen/cosmos-sdk -../../scripts/swagger/protoc-swagger-gen.sh $CONFIG_FILE $OUTPUT_FILE -popd +cd ./tmp-swagger-gen/cosmos-sdk/proto +pwd +proto_dirs=$(find ./cosmos -path -prune -o -name '*.proto' -print0 | xargs -0 -n1 dirname | sort | uniq) +for dir in $proto_dirs; do + # generate swagger files (filter query files) + query_file=$(find "${dir}" -maxdepth 2 \( -name 'query.proto' -o -name 'service.proto' \)) + if [[ ! -z "$query_file" ]]; then + buf generate --template buf.gen.swagger.yaml $query_file + fi +done +cd .. +# combine swagger files +# uses nodejs package `swagger-combine`. +# all the individual swagger files need to be configured in `config.json` for merging +swagger-combine $CONFIG_FILE -o $OUTPUT_FILE --continueOnConflictingPaths true --includeDefinitions true +cd ../.. +# clean swagger files rm -rf ./tmp-swagger-gen \ No newline at end of file diff --git a/scripts/protocgen.sh b/scripts/protocgen.sh new file mode 100755 index 000000000..3829edec5 --- /dev/null +++ b/scripts/protocgen.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +set -euox pipefail + +echo "Generating gogo proto code" +cd proto + +buf generate --template buf.gen.gogo.yaml + +cd .. 
+ +# move proto files to the right places +cp -r github.com/zigbee-alliance/distributed-compliance-ledger/* ./ +rm -rf github.com diff --git a/scripts/swagger/config/cosmos-base-config.json b/scripts/swagger/config/cosmos-base-config.json index b6af8a27f..deee20f9e 100644 --- a/scripts/swagger/config/cosmos-base-config.json +++ b/scripts/swagger/config/cosmos-base-config.json @@ -1,12 +1,13 @@ { "swagger": "2.0", "info": { - "title": "cosmos/base/tendermint/v1beta1/query.proto", - "version": "v0.44.4","description": "A REST interface for base service endpoints" + "title": "Cosmos SDK - gRPC Gateway docs", + "description": "A REST interface for state queries", + "version": "1.0.0" }, "apis": [ { - "url": "cosmos/base/tendermint/v1beta1/query.swagger.json", + "url": "./tmp-swagger-gen/cosmos/base/tendermint/v1beta1/query.swagger.json", "operationIds": { "rename": { "Params": "BaseParams" diff --git a/scripts/swagger/config/cosmos-tx-config.json b/scripts/swagger/config/cosmos-tx-config.json index 3301fd2ec..f8642cfb5 100644 --- a/scripts/swagger/config/cosmos-tx-config.json +++ b/scripts/swagger/config/cosmos-tx-config.json @@ -2,11 +2,11 @@ "swagger": "2.0", "info": { "title": "cosmos/tx/v1beta1/service.proto", - "version": "v0.44.4","description": "A REST interface for tx service endpoints" + "version": "v0.47.8","description": "A REST interface for tx service endpoints" }, "apis": [ { - "url": "cosmos/tx/v1beta1/service.swagger.json", + "url": "./tmp-swagger-gen/cosmos/tx/v1beta1/service.swagger.json", "dereference": { "circular": "ignore" } diff --git a/scripts/swagger/config/dcl-config.json b/scripts/swagger/config/dcl-config.json index 7b0ec728f..5515e1363 100644 --- a/scripts/swagger/config/dcl-config.json +++ b/scripts/swagger/config/dcl-config.json @@ -9,7 +9,7 @@ "url": "https://github.com/zigbee-alliance/distributed-compliance-ledger/blob/master/LICENSE" }, - "description": "A REST interface for state queries. 
\nTendermint RPC endpoints are available here: https://docs.tendermint.com/v0.34/rpc/." + "description": "A REST interface for state queries. \nCometBFT RPC endpoints are available here: https://docs.cometbft.com/v0.37/rpc/." }, "apis": [ { diff --git a/scripts/swagger/protoc-swagger-gen.sh b/scripts/swagger/protoc-swagger-gen.sh index cb4c2a12f..2c1dd2172 100755 --- a/scripts/swagger/protoc-swagger-gen.sh +++ b/scripts/swagger/protoc-swagger-gen.sh @@ -15,12 +15,8 @@ for dir in $proto_dirs; do # generate swagger files (filter query files) query_file=$(find "${dir}" -maxdepth 1 \( -name 'query.proto' -o -name 'service.proto' \)) if [[ ! -z "$query_file" ]]; then - protoc \ - -I "proto" \ - -I "third_party/proto" \ - "$query_file" \ - --swagger_out=./swagger-out \ - --swagger_opt=logtostderr=true --swagger_opt=fqn_for_swagger_name=true --swagger_opt=simple_operation_ids=true + buf generate --template proto/buf.gen.swagger.yaml "$query_file" + cp -r zigbeealliance/distributedcomplianceledger/* swagger-out/ fi done @@ -31,4 +27,5 @@ pushd ./swagger-out swagger-combine $CONFIG_FILE -o $OUTPUT_FILE --continueOnConflictingPaths true --includeDefinitions true popd +rm -rf zigbeealliance rm -rf ./swagger-out \ No newline at end of file diff --git a/testutil/cli/cli.go b/testutil/cli/cli.go index 89cb75d26..3a8ec3d24 100644 --- a/testutil/cli/cli.go +++ b/testutil/cli/cli.go @@ -3,10 +3,10 @@ package cli import ( "testing" + "cosmossdk.io/errors" "github.com/cosmos/cosmos-sdk/client" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" sdk "github.com/cosmos/cosmos-sdk/types" - sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/spf13/cobra" "github.com/stretchr/testify/require" ) @@ -22,7 +22,7 @@ func ExecTestCLITxCmd(t *testing.T, clientCtx client.Context, cmd *cobra.Command require.NoError(t, err) if resp.Code != 0 { - err = sdkerrors.ABCIError(resp.Codespace, resp.Code, resp.RawLog) + err = errors.ABCIError(resp.Codespace, resp.Code, resp.RawLog) 
return nil, err } diff --git a/testutil/keeper/compliance.go b/testutil/keeper/compliance.go index 47927f746..22036a5d3 100644 --- a/testutil/keeper/compliance.go +++ b/testutil/keeper/compliance.go @@ -3,15 +3,15 @@ package keeper import ( "testing" + tmdb "github.com/cometbft/cometbft-db" + "github.com/cometbft/cometbft/libs/log" + tmproto "github.com/cometbft/cometbft/proto/tendermint/types" "github.com/cosmos/cosmos-sdk/codec" codectypes "github.com/cosmos/cosmos-sdk/codec/types" "github.com/cosmos/cosmos-sdk/store" storetypes "github.com/cosmos/cosmos-sdk/store/types" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/stretchr/testify/require" - "github.com/tendermint/tendermint/libs/log" - tmproto "github.com/tendermint/tendermint/proto/tendermint/types" - tmdb "github.com/tendermint/tm-db" "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" ) @@ -27,8 +27,8 @@ func ComplianceKeeper( db := tmdb.NewMemDB() stateStore := store.NewCommitMultiStore(db) - stateStore.MountStoreWithDB(storeKey, sdk.StoreTypeIAVL, db) - stateStore.MountStoreWithDB(memStoreKey, sdk.StoreTypeMemory, nil) + stateStore.MountStoreWithDB(storeKey, storetypes.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, storetypes.StoreTypeMemory, nil) require.NoError(tb, stateStore.LoadLatestVersion()) registry := codectypes.NewInterfaceRegistry() diff --git a/testutil/keeper/dclauth.go b/testutil/keeper/dclauth.go index 073c3f58f..511dc2c95 100644 --- a/testutil/keeper/dclauth.go +++ b/testutil/keeper/dclauth.go @@ -3,6 +3,9 @@ package keeper import ( "testing" + tmdb "github.com/cometbft/cometbft-db" + "github.com/cometbft/cometbft/libs/log" + tmproto "github.com/cometbft/cometbft/proto/tendermint/types" "github.com/cosmos/cosmos-sdk/codec" codectypes "github.com/cosmos/cosmos-sdk/codec/types" cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec" @@ -10,9 +13,6 @@ import ( 
storetypes "github.com/cosmos/cosmos-sdk/store/types" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/stretchr/testify/require" - "github.com/tendermint/tendermint/libs/log" - tmproto "github.com/tendermint/tendermint/proto/tendermint/types" - tmdb "github.com/tendermint/tm-db" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" ) @@ -24,8 +24,8 @@ func DclauthKeeper(tb testing.TB) (*keeper.Keeper, sdk.Context) { db := tmdb.NewMemDB() stateStore := store.NewCommitMultiStore(db) - stateStore.MountStoreWithDB(storeKey, sdk.StoreTypeIAVL, db) - stateStore.MountStoreWithDB(memStoreKey, sdk.StoreTypeMemory, nil) + stateStore.MountStoreWithDB(storeKey, storetypes.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, storetypes.StoreTypeMemory, nil) require.NoError(tb, stateStore.LoadLatestVersion()) registry := codectypes.NewInterfaceRegistry() diff --git a/testutil/keeper/dclupgrade.go b/testutil/keeper/dclupgrade.go index b6de6d012..6c1cc6cc4 100644 --- a/testutil/keeper/dclupgrade.go +++ b/testutil/keeper/dclupgrade.go @@ -3,15 +3,15 @@ package keeper import ( "testing" + tmdb "github.com/cometbft/cometbft-db" + "github.com/cometbft/cometbft/libs/log" + tmproto "github.com/cometbft/cometbft/proto/tendermint/types" "github.com/cosmos/cosmos-sdk/codec" codectypes "github.com/cosmos/cosmos-sdk/codec/types" "github.com/cosmos/cosmos-sdk/store" storetypes "github.com/cosmos/cosmos-sdk/store/types" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/stretchr/testify/require" - "github.com/tendermint/tendermint/libs/log" - tmproto "github.com/tendermint/tendermint/proto/tendermint/types" - tmdb "github.com/tendermint/tm-db" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclupgrade/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclupgrade/types" ) @@ -23,8 +23,8 @@ func DclupgradeKeeper(tb testing.TB, dclAuthKeeper 
types.DclauthKeeper, upgradeK db := tmdb.NewMemDB() stateStore := store.NewCommitMultiStore(db) - stateStore.MountStoreWithDB(storeKey, sdk.StoreTypeIAVL, db) - stateStore.MountStoreWithDB(memStoreKey, sdk.StoreTypeMemory, nil) + stateStore.MountStoreWithDB(storeKey, storetypes.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, storetypes.StoreTypeMemory, nil) require.NoError(tb, stateStore.LoadLatestVersion()) registry := codectypes.NewInterfaceRegistry() diff --git a/testutil/keeper/model.go b/testutil/keeper/model.go index 12672854b..4c5c225fb 100644 --- a/testutil/keeper/model.go +++ b/testutil/keeper/model.go @@ -3,28 +3,28 @@ package keeper import ( "testing" + tmdb "github.com/cometbft/cometbft-db" + "github.com/cometbft/cometbft/libs/log" + tmproto "github.com/cometbft/cometbft/proto/tendermint/types" "github.com/cosmos/cosmos-sdk/codec" codectypes "github.com/cosmos/cosmos-sdk/codec/types" "github.com/cosmos/cosmos-sdk/store" storetypes "github.com/cosmos/cosmos-sdk/store/types" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/stretchr/testify/require" - "github.com/tendermint/tendermint/libs/log" - tmproto "github.com/tendermint/tendermint/proto/tendermint/types" - tmdb "github.com/tendermint/tm-db" "github.com/zigbee-alliance/distributed-compliance-ledger/x/model/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/x/model/types" ) -func ModelKeeper(tb testing.TB, dclauthKeeper types.DclauthKeeper, complianceKeeper types.ComplianceKeeper) (*keeper.Keeper, sdk.Context) { +func ModelKeeper(tb testing.TB, dclauthKeeper types.DclauthKeeper, complianceKeeper keeper.ComplianceKeeper) (*keeper.Keeper, sdk.Context) { tb.Helper() storeKey := sdk.NewKVStoreKey(types.StoreKey) memStoreKey := storetypes.NewMemoryStoreKey(types.MemStoreKey) db := tmdb.NewMemDB() stateStore := store.NewCommitMultiStore(db) - stateStore.MountStoreWithDB(storeKey, sdk.StoreTypeIAVL, db) - stateStore.MountStoreWithDB(memStoreKey, sdk.StoreTypeMemory, nil) + 
stateStore.MountStoreWithDB(storeKey, storetypes.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, storetypes.StoreTypeMemory, nil) require.NoError(tb, stateStore.LoadLatestVersion()) registry := codectypes.NewInterfaceRegistry() diff --git a/testutil/keeper/pki.go b/testutil/keeper/pki.go index 61a58b9c4..858069507 100644 --- a/testutil/keeper/pki.go +++ b/testutil/keeper/pki.go @@ -3,15 +3,15 @@ package keeper import ( "testing" + tmdb "github.com/cometbft/cometbft-db" + "github.com/cometbft/cometbft/libs/log" + tmproto "github.com/cometbft/cometbft/proto/tendermint/types" "github.com/cosmos/cosmos-sdk/codec" codectypes "github.com/cosmos/cosmos-sdk/codec/types" "github.com/cosmos/cosmos-sdk/store" storetypes "github.com/cosmos/cosmos-sdk/store/types" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/stretchr/testify/require" - "github.com/tendermint/tendermint/libs/log" - tmproto "github.com/tendermint/tendermint/proto/tendermint/types" - tmdb "github.com/tendermint/tm-db" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/types" @@ -24,8 +24,8 @@ func PkiKeeper(tb testing.TB, dclauthKeeper types.DclauthKeeper) (*keeper.Keeper db := tmdb.NewMemDB() stateStore := store.NewCommitMultiStore(db) - stateStore.MountStoreWithDB(storeKey, sdk.StoreTypeIAVL, db) - stateStore.MountStoreWithDB(memStoreKey, sdk.StoreTypeMemory, nil) + stateStore.MountStoreWithDB(storeKey, storetypes.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, storetypes.StoreTypeMemory, nil) require.NoError(tb, stateStore.LoadLatestVersion()) registry := codectypes.NewInterfaceRegistry() diff --git a/testutil/keeper/validator.go b/testutil/keeper/validator.go index a43b9e236..ec40469c3 100644 --- a/testutil/keeper/validator.go +++ b/testutil/keeper/validator.go @@ -3,6 +3,9 @@ package keeper import ( "testing" + 
tmdb "github.com/cometbft/cometbft-db" + "github.com/cometbft/cometbft/libs/log" + tmproto "github.com/cometbft/cometbft/proto/tendermint/types" "github.com/cosmos/cosmos-sdk/codec" codectypes "github.com/cosmos/cosmos-sdk/codec/types" cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec" @@ -10,9 +13,6 @@ import ( storetypes "github.com/cosmos/cosmos-sdk/store/types" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/stretchr/testify/require" - "github.com/tendermint/tendermint/libs/log" - tmproto "github.com/tendermint/tendermint/proto/tendermint/types" - tmdb "github.com/tendermint/tm-db" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" dclauthkeeper "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/keeper" @@ -26,13 +26,13 @@ func ValidatorKeeper(tb testing.TB, dclauthK *dclauthkeeper.Keeper) (*keeper.Kee db := tmdb.NewMemDB() stateStore := store.NewCommitMultiStore(db) - stateStore.MountStoreWithDB(storeKey, sdk.StoreTypeIAVL, db) - stateStore.MountStoreWithDB(memStoreKey, sdk.StoreTypeMemory, nil) + stateStore.MountStoreWithDB(storeKey, storetypes.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, storetypes.StoreTypeMemory, nil) // TODO issue 99: might be not the best solution if dclauthK != nil { - stateStore.MountStoreWithDB(dclauthK.StoreKey(), sdk.StoreTypeIAVL, db) - stateStore.MountStoreWithDB(dclauthK.MemKey(), sdk.StoreTypeMemory, nil) + stateStore.MountStoreWithDB(dclauthK.StoreKey(), storetypes.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(dclauthK.MemKey(), storetypes.StoreTypeMemory, nil) } require.NoError(tb, stateStore.LoadLatestVersion()) diff --git a/testutil/keeper/vendorinfo.go b/testutil/keeper/vendorinfo.go index 5cf253db3..6b163a6f9 100644 --- a/testutil/keeper/vendorinfo.go +++ b/testutil/keeper/vendorinfo.go @@ -3,15 +3,15 @@ package keeper import ( "testing" + tmdb 
"github.com/cometbft/cometbft-db" + "github.com/cometbft/cometbft/libs/log" + tmproto "github.com/cometbft/cometbft/proto/tendermint/types" "github.com/cosmos/cosmos-sdk/codec" codectypes "github.com/cosmos/cosmos-sdk/codec/types" "github.com/cosmos/cosmos-sdk/store" storetypes "github.com/cosmos/cosmos-sdk/store/types" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/stretchr/testify/require" - "github.com/tendermint/tendermint/libs/log" - tmproto "github.com/tendermint/tendermint/proto/tendermint/types" - tmdb "github.com/tendermint/tm-db" "github.com/zigbee-alliance/distributed-compliance-ledger/x/vendorinfo/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/x/vendorinfo/types" ) @@ -23,8 +23,8 @@ func VendorinfoKeeper(tb testing.TB, dclauthKeeper types.DclauthKeeper) (*keeper db := tmdb.NewMemDB() stateStore := store.NewCommitMultiStore(db) - stateStore.MountStoreWithDB(storeKey, sdk.StoreTypeIAVL, db) - stateStore.MountStoreWithDB(memStoreKey, sdk.StoreTypeMemory, nil) + stateStore.MountStoreWithDB(storeKey, storetypes.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, storetypes.StoreTypeMemory, nil) require.NoError(tb, stateStore.LoadLatestVersion()) registry := codectypes.NewInterfaceRegistry() diff --git a/testutil/network/network.go b/testutil/network/network.go index a742ce254..ce7a87c00 100644 --- a/testutil/network/network.go +++ b/testutil/network/network.go @@ -1,3 +1,4 @@ +//nolint:staticcheck package network import ( @@ -6,6 +7,7 @@ import ( "encoding/json" "errors" "fmt" + "io/ioutil" "net" "net/http" @@ -16,6 +18,15 @@ import ( "testing" "time" + "github.com/stretchr/testify/require" + "google.golang.org/grpc" + + "cosmossdk.io/math" + tmdb "github.com/cometbft/cometbft-db" + "github.com/cometbft/cometbft/libs/log" + tmrand "github.com/cometbft/cometbft/libs/rand" + "github.com/cometbft/cometbft/node" + tmclient "github.com/cometbft/cometbft/rpc/client" "github.com/cosmos/cosmos-sdk/baseapp" 
"github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/client/tx" @@ -28,26 +39,23 @@ import ( "github.com/cosmos/cosmos-sdk/server/api" srvconfig "github.com/cosmos/cosmos-sdk/server/config" servertypes "github.com/cosmos/cosmos-sdk/server/types" - "github.com/cosmos/cosmos-sdk/simapp" - "github.com/cosmos/cosmos-sdk/simapp/params" - storetypes "github.com/cosmos/cosmos-sdk/store/types" + pruningtypes "github.com/cosmos/cosmos-sdk/store/pruning/types" + "github.com/cosmos/cosmos-sdk/testutil" + "github.com/cosmos/cosmos-sdk/testutil/network" sdk "github.com/cosmos/cosmos-sdk/types" + _ "github.com/cosmos/cosmos-sdk/x/auth" //nolint: nolintlint + _ "github.com/cosmos/cosmos-sdk/x/auth/tx/config" authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" - "github.com/stretchr/testify/require" - "github.com/tendermint/spm/cosmoscmd" - tmcfg "github.com/tendermint/tendermint/config" - tmflags "github.com/tendermint/tendermint/libs/cli/flags" - "github.com/tendermint/tendermint/libs/log" - tmrand "github.com/tendermint/tendermint/libs/rand" - "github.com/tendermint/tendermint/node" - tmclient "github.com/tendermint/tendermint/rpc/client" - tmdb "github.com/tendermint/tm-db" + _ "github.com/cosmos/cosmos-sdk/x/bank" //nolint: nolintlint + _ "github.com/cosmos/cosmos-sdk/x/consensus" + _ "github.com/cosmos/cosmos-sdk/x/params" + _ "github.com/cosmos/cosmos-sdk/x/staking" + "github.com/zigbee-alliance/distributed-compliance-ledger/app" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" dclauthtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclgenutil" validatortypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/types" - "google.golang.org/grpc" ) // New creates instance with fully configured cosmos network. 
@@ -72,33 +80,41 @@ func New(t *testing.T, configs ...Config) *Network { // DefaultConfig will initialize config for the network with custom application, // genesis and single validator. All other parameters are inherited from cosmos-sdk/testutil/network.DefaultConfig. func DefaultConfig() Config { - encoding := cosmoscmd.MakeEncodingConfig(app.ModuleBasics) + var ( + encoding = app.MakeEncodingConfig() + chainID = "chain-" + tmrand.NewRand().Str(6) + ) return Config{ - Codec: encoding.Marshaler, + Codec: encoding.Codec, TxConfig: encoding.TxConfig, LegacyAmino: encoding.Amino, InterfaceRegistry: encoding.InterfaceRegistry, AccountRetriever: dclauthtypes.AccountRetriever{}, - AppConstructor: func(val Validator) servertypes.Application { + AppConstructor: func(val network.ValidatorI) servertypes.Application { return app.New( - val.Ctx.Logger, tmdb.NewMemDB(), nil, true, map[int64]bool{}, val.Ctx.Config.RootDir, 0, + val.GetCtx().Logger, + tmdb.NewMemDB(), + nil, + true, map[int64]bool{}, + val.GetCtx().Config.RootDir, + 0, encoding, - simapp.EmptyAppOptions{}, - baseapp.SetPruning(storetypes.NewPruningOptionsFromString(val.AppConfig.Pruning)), - baseapp.SetMinGasPrices(val.AppConfig.MinGasPrices), + baseapp.SetPruning(pruningtypes.NewPruningOptionsFromString(val.GetAppConfig().Pruning)), + baseapp.SetMinGasPrices(val.GetAppConfig().MinGasPrices), + baseapp.SetChainID(chainID), ) }, - GenesisState: app.ModuleBasics.DefaultGenesis(encoding.Marshaler), + GenesisState: app.ModuleBasics.DefaultGenesis(encoding.Codec), TimeoutCommit: 2 * time.Second, - ChainID: "chain-" + tmrand.NewRand().Str(6), + ChainID: chainID, NumValidators: 1, BondDenom: sdk.DefaultBondDenom, MinGasPrices: fmt.Sprintf("0.000006%s", sdk.DefaultBondDenom), AccountTokens: sdk.TokensFromConsensusPower(1000, sdk.DefaultPowerReduction), StakingTokens: sdk.TokensFromConsensusPower(500, sdk.DefaultPowerReduction), BondedTokens: sdk.TokensFromConsensusPower(100, sdk.DefaultPowerReduction), - 
PruningStrategy: storetypes.PruningOptionNothing, + PruningStrategy: pruningtypes.PruningOptionNothing, CleanupDir: true, SigningAlgo: string(hd.Secp256k1Type), KeyringOptions: []keyring.Option{}, @@ -116,20 +132,7 @@ var lock = new(sync.Mutex) // AppConstructor defines a function which accepts a network configuration and // creates an ABCI Application to provide to Tendermint. -type AppConstructor = func(val Validator) servertypes.Application - -// NewAppConstructor returns a new simapp AppConstructor. -func NewAppConstructor(encodingCfg params.EncodingConfig) AppConstructor { - return func(val Validator) servertypes.Application { - return simapp.NewSimApp( - val.Ctx.Logger, tmdb.NewMemDB(), nil, true, make(map[int64]bool), val.Ctx.Config.RootDir, 0, - encodingCfg, - simapp.EmptyAppOptions{}, - baseapp.SetPruning(storetypes.NewPruningOptionsFromString(val.AppConfig.Pruning)), - baseapp.SetMinGasPrices(val.AppConfig.MinGasPrices), - ) - } -} +type AppConstructor = func(val network.ValidatorI) servertypes.Application // Config defines the necessary configuration used to bootstrap and start an // in-process local testing network. @@ -145,11 +148,12 @@ type Config struct { TimeoutCommit time.Duration // the consensus commitment timeout ChainID string // the network chain-id NumValidators int // the total number of validators to create and bond + Mnemonics []string // custom user-provided validator operator mnemonics BondDenom string // the staking bond denomination MinGasPrices string // the minimum gas prices each validator will accept - AccountTokens sdk.Int // the amount of unique validator tokens (e.g. 1000node0) - StakingTokens sdk.Int // the amount of tokens each validator has available to stake - BondedTokens sdk.Int // the amount of tokens each validator stakes + AccountTokens math.Int // the amount of unique validator tokens (e.g. 
1000node0) + StakingTokens math.Int // the amount of tokens each validator has available to stake + BondedTokens math.Int // the amount of tokens each validator stakes PruningStrategy string // the pruning strategy each validator will have EnableLogging bool // enable Tendermint logging to STDOUT CleanupDir bool // remove base temporary directory during cleanup @@ -201,6 +205,14 @@ type ( } ) +func (v Validator) GetCtx() *server.Context { + return v.Ctx +} + +func (v Validator) GetAppConfig() *srvconfig.Config { + return v.AppConfig +} + // New creates a new Network for integration tests. func newNetwork(t *testing.T, cfg Config) *Network { t.Helper() @@ -278,7 +290,6 @@ func newNetwork(t *testing.T, cfg Config) *Network { logger := log.NewNopLogger() if cfg.EnableLogging { logger = log.NewTMLogger(log.NewSyncWriter(os.Stdout)) - logger, _ = tmflags.ParseLogLevel("info", logger, tmcfg.DefaultLogLevel) } ctx.Logger = logger @@ -310,14 +321,19 @@ func newNetwork(t *testing.T, cfg Config) *Network { nodeIDs[i] = nodeID valPubKeys[i] = pubKey - kb, err := keyring.New(sdk.KeyringServiceName(), keyring.BackendTest, clientDir, buf, cfg.KeyringOptions...) + kb, err := keyring.New(sdk.KeyringServiceName(), keyring.BackendTest, clientDir, buf, cfg.Codec, cfg.KeyringOptions...) 
require.NoError(t, err) keyringAlgos, _ := kb.SupportedAlgorithms() algo, err := keyring.NewSigningAlgoFromString(cfg.SigningAlgo, keyringAlgos) require.NoError(t, err) - addr, secret, err := server.GenerateSaveCoinKey(kb, nodeDirName, true, algo) + var mnemonic string + if i < len(cfg.Mnemonics) { + mnemonic = cfg.Mnemonics[i] + } + + addr, secret, err := testutil.GenerateSaveCoinKey(kb, nodeDirName, mnemonic, true, algo) require.NoError(t, err) info := map[string]string{"secret": secret} diff --git a/testutil/network/util.go b/testutil/network/util.go index 60922c536..d58fe5899 100644 --- a/testutil/network/util.go +++ b/testutil/network/util.go @@ -5,14 +5,14 @@ import ( "path/filepath" "time" - tmos "github.com/tendermint/tendermint/libs/os" - "github.com/tendermint/tendermint/node" - "github.com/tendermint/tendermint/p2p" - pvm "github.com/tendermint/tendermint/privval" - "github.com/tendermint/tendermint/proxy" - "github.com/tendermint/tendermint/rpc/client/local" - "github.com/tendermint/tendermint/types" - tmtime "github.com/tendermint/tendermint/types/time" + tmos "github.com/cometbft/cometbft/libs/os" + "github.com/cometbft/cometbft/node" + "github.com/cometbft/cometbft/p2p" + pvm "github.com/cometbft/cometbft/privval" + "github.com/cometbft/cometbft/proxy" + "github.com/cometbft/cometbft/rpc/client/local" + "github.com/cometbft/cometbft/types" + tmtime "github.com/cometbft/cometbft/types/time" "github.com/cosmos/cosmos-sdk/server/api" servergrpc "github.com/cosmos/cosmos-sdk/server/grpc" @@ -102,7 +102,7 @@ func startInProcess(cfg Config, val *Validator) error { } if val.AppConfig.GRPC.Enable { - grpcSrv, err := servergrpc.StartGRPCServer(val.ClientCtx, app, val.AppConfig.GRPC.Address) + grpcSrv, err := servergrpc.StartGRPCServer(val.ClientCtx, app, val.AppConfig.GRPC) if err != nil { return err } diff --git a/testutil/simapp/simapp.go b/testutil/simapp/simapp.go deleted file mode 100644 index 229268a0d..000000000 --- a/testutil/simapp/simapp.go +++ 
/dev/null @@ -1,49 +0,0 @@ -package simapp - -import ( - "time" - - "github.com/cosmos/cosmos-sdk/simapp" - "github.com/tendermint/spm/cosmoscmd" - abci "github.com/tendermint/tendermint/abci/types" - "github.com/tendermint/tendermint/libs/log" - tmproto "github.com/tendermint/tendermint/proto/tendermint/types" - tmtypes "github.com/tendermint/tendermint/types" - tmdb "github.com/tendermint/tm-db" - "github.com/zigbee-alliance/distributed-compliance-ledger/app" -) - -// New creates application instance with in-memory database and disabled logging. -func New(dir string) cosmoscmd.App { - db := tmdb.NewMemDB() - logger := log.NewNopLogger() - - encoding := cosmoscmd.MakeEncodingConfig(app.ModuleBasics) - - a := app.New(logger, db, nil, true, map[int64]bool{}, dir, 0, encoding, - simapp.EmptyAppOptions{}) - // InitChain updates deliverState which is required when app.NewContext is called - a.InitChain(abci.RequestInitChain{ - ConsensusParams: defaultConsensusParams, - AppStateBytes: []byte("{}"), - }) - - return a -} - -var defaultConsensusParams = &abci.ConsensusParams{ - Block: &abci.BlockParams{ - MaxBytes: 200000, - MaxGas: 2000000, - }, - Evidence: &tmproto.EvidenceParams{ - MaxAgeNumBlocks: 302400, - MaxAgeDuration: 504 * time.Hour, // 3 weeks is the max duration - MaxBytes: 10000, - }, - Validator: &tmproto.ValidatorParams{ - PubKeyTypes: []string{ - tmtypes.ABCIPubKeyTypeEd25519, - }, - }, -} diff --git a/third_party/proto/cosmos/auth/v1beta1/auth.proto b/third_party/proto/cosmos/auth/v1beta1/auth.proto deleted file mode 100644 index 981df4c0c..000000000 --- a/third_party/proto/cosmos/auth/v1beta1/auth.proto +++ /dev/null @@ -1,50 +0,0 @@ -syntax = "proto3"; -package cosmos.auth.v1beta1; - -import "cosmos_proto/cosmos.proto"; -import "gogoproto/gogo.proto"; -import "google/protobuf/any.proto"; - -option go_package = "github.com/cosmos/cosmos-sdk/x/auth/types"; - -// BaseAccount defines a base account type. 
It contains all the necessary fields -// for basic account functionality. Any custom account type should extend this -// type for additional functionality (e.g. vesting). -message BaseAccount { - option (gogoproto.goproto_getters) = false; - option (gogoproto.goproto_stringer) = false; - option (gogoproto.equal) = false; - - //option (cosmos_proto.implements_interface) = "AccountI"; - - string address = 1 [(cosmos_proto.scalar) = "cosmos.AddressString"]; - google.protobuf.Any pub_key = 2 - [(gogoproto.jsontag) = "public_key,omitempty"]; - uint64 account_number = 3; - uint64 sequence = 4; -} - -// ModuleAccount defines an account for modules that holds coins on a pool. -message ModuleAccount { - option (gogoproto.goproto_getters) = false; - option (gogoproto.goproto_stringer) = false; - //option (cosmos_proto.implements_interface) = "ModuleAccountI"; - - BaseAccount base_account = 1 [(gogoproto.embed) = true]; - string name = 2; - repeated string permissions = 3; -} - -// Params defines the parameters for the auth module. 
-message Params { - option (gogoproto.equal) = true; - option (gogoproto.goproto_stringer) = false; - - uint64 max_memo_characters = 1; - uint64 tx_sig_limit = 2; - uint64 tx_size_cost_per_byte = 3; - uint64 sig_verify_cost_ed25519 = 4 - [(gogoproto.customname) = "SigVerifyCostED25519"]; - uint64 sig_verify_cost_secp256k1 = 5 - [(gogoproto.customname) = "SigVerifyCostSecp256k1"]; -} diff --git a/third_party/proto/cosmos/base/abci/v1beta1/abci.proto b/third_party/proto/cosmos/base/abci/v1beta1/abci.proto deleted file mode 100644 index 8bfeaf8e1..000000000 --- a/third_party/proto/cosmos/base/abci/v1beta1/abci.proto +++ /dev/null @@ -1,137 +0,0 @@ -syntax = "proto3"; -package cosmos.base.abci.v1beta1; - -import "gogoproto/gogo.proto"; -import "tendermint/abci/types.proto"; -import "google/protobuf/any.proto"; - -option go_package = "github.com/cosmos/cosmos-sdk/types"; -option (gogoproto.goproto_stringer_all) = false; - -// TxResponse defines a structure containing relevant tx data and metadata. The -// tags are stringified and the log is JSON decoded. -message TxResponse { - option (gogoproto.goproto_getters) = false; - // The block height - int64 height = 1; - // The transaction hash. - string txhash = 2 [(gogoproto.customname) = "TxHash"]; - // Namespace for the Code - string codespace = 3; - // Response code. - uint32 code = 4; - // Result bytes, if any. - string data = 5; - // The output of the application's logger (raw string). May be - // non-deterministic. - string raw_log = 6; - // The output of the application's logger (typed). May be non-deterministic. - repeated ABCIMessageLog logs = 7 [(gogoproto.castrepeated) = "ABCIMessageLogs", (gogoproto.nullable) = false]; - // Additional information. May be non-deterministic. - string info = 8; - // Amount of gas requested for transaction. - int64 gas_wanted = 9; - // Amount of gas consumed by transaction. - int64 gas_used = 10; - // The request transaction bytes. 
- google.protobuf.Any tx = 11; - // Time of the previous block. For heights > 1, it's the weighted median of - // the timestamps of the valid votes in the block.LastCommit. For height == 1, - // it's genesis time. - string timestamp = 12; -} - -// ABCIMessageLog defines a structure containing an indexed tx ABCI message log. -message ABCIMessageLog { - option (gogoproto.stringer) = true; - - uint32 msg_index = 1; - string log = 2; - - // Events contains a slice of Event objects that were emitted during some - // execution. - repeated StringEvent events = 3 [(gogoproto.castrepeated) = "StringEvents", (gogoproto.nullable) = false]; -} - -// StringEvent defines en Event object wrapper where all the attributes -// contain key/value pairs that are strings instead of raw bytes. -message StringEvent { - option (gogoproto.stringer) = true; - - string type = 1; - repeated Attribute attributes = 2 [(gogoproto.nullable) = false]; -} - -// Attribute defines an attribute wrapper where the key and value are -// strings instead of raw bytes. -message Attribute { - string key = 1; - string value = 2; -} - -// GasInfo defines tx execution gas context. -message GasInfo { - // GasWanted is the maximum units of work we allow this tx to perform. - uint64 gas_wanted = 1; - - // GasUsed is the amount of gas actually consumed. - uint64 gas_used = 2; -} - -// Result is the union of ResponseFormat and ResponseCheckTx. -message Result { - option (gogoproto.goproto_getters) = false; - - // Data is any data returned from message or handler execution. It MUST be - // length prefixed in order to separate data from multiple message executions. - bytes data = 1; - - // Log contains the log information from message or handler execution. - string log = 2; - - // Events contains a slice of Event objects that were emitted during message - // or handler execution. 
- repeated tendermint.abci.Event events = 3 [(gogoproto.nullable) = false]; -} - -// SimulationResponse defines the response generated when a transaction is -// successfully simulated. -message SimulationResponse { - GasInfo gas_info = 1 [(gogoproto.embed) = true, (gogoproto.nullable) = false]; - Result result = 2; -} - -// MsgData defines the data returned in a Result object during message -// execution. -message MsgData { - option (gogoproto.stringer) = true; - - string msg_type = 1; - bytes data = 2; -} - -// TxMsgData defines a list of MsgData. A transaction will have a MsgData object -// for each message. -message TxMsgData { - option (gogoproto.stringer) = true; - - repeated MsgData data = 1; -} - -// SearchTxsResult defines a structure for querying txs pageable -message SearchTxsResult { - option (gogoproto.stringer) = true; - - // Count of all txs - uint64 total_count = 1 [(gogoproto.jsontag) = "total_count"]; - // Count of txs in current page - uint64 count = 2; - // Index of current page, start from 1 - uint64 page_number = 3 [(gogoproto.jsontag) = "page_number"]; - // Count of total pages - uint64 page_total = 4 [(gogoproto.jsontag) = "page_total"]; - // Max count txs per page - uint64 limit = 5; - // List of txs in current page - repeated TxResponse txs = 6; -} diff --git a/third_party/proto/cosmos/base/query/v1beta1/pagination.proto b/third_party/proto/cosmos/base/query/v1beta1/pagination.proto deleted file mode 100644 index cd5eb066d..000000000 --- a/third_party/proto/cosmos/base/query/v1beta1/pagination.proto +++ /dev/null @@ -1,55 +0,0 @@ -syntax = "proto3"; -package cosmos.base.query.v1beta1; - -option go_package = "github.com/cosmos/cosmos-sdk/types/query"; - -// PageRequest is to be embedded in gRPC request messages for efficient -// pagination. 
Ex: -// -// message SomeRequest { -// Foo some_parameter = 1; -// PageRequest pagination = 2; -// } -message PageRequest { - // key is a value returned in PageResponse.next_key to begin - // querying the next page most efficiently. Only one of offset or key - // should be set. - bytes key = 1; - - // offset is a numeric offset that can be used when key is unavailable. - // It is less efficient than using key. Only one of offset or key should - // be set. - uint64 offset = 2; - - // limit is the total number of results to be returned in the result page. - // If left empty it will default to a value to be set by each app. - uint64 limit = 3; - - // count_total is set to true to indicate that the result set should include - // a count of the total number of items available for pagination in UIs. - // count_total is only respected when offset is used. It is ignored when key - // is set. - bool count_total = 4; - - // reverse is set to true if results are to be returned in the descending order. - // - // Since: cosmos-sdk 0.43 - bool reverse = 5; -} - -// PageResponse is to be embedded in gRPC response messages where the -// corresponding request message has used PageRequest. 
-// -// message SomeResponse { -// repeated Bar results = 1; -// PageResponse page = 2; -// } -message PageResponse { - // next_key is the key to be passed to PageRequest.key to - // query the next page most efficiently - bytes next_key = 1; - - // total is total number of results available if PageRequest.count_total - // was set, its value is undefined otherwise - uint64 total = 2; -} diff --git a/third_party/proto/cosmos/upgrade/v1beta1/upgrade.proto b/third_party/proto/cosmos/upgrade/v1beta1/upgrade.proto deleted file mode 100644 index e888b393d..000000000 --- a/third_party/proto/cosmos/upgrade/v1beta1/upgrade.proto +++ /dev/null @@ -1,78 +0,0 @@ -syntax = "proto3"; -package cosmos.upgrade.v1beta1; - -import "google/protobuf/any.proto"; -import "gogoproto/gogo.proto"; -import "google/protobuf/timestamp.proto"; - -option go_package = "github.com/cosmos/cosmos-sdk/x/upgrade/types"; -option (gogoproto.goproto_getters_all) = false; - -// Plan specifies information about a planned upgrade and when it should occur. -message Plan { - option (gogoproto.equal) = true; - option (gogoproto.goproto_stringer) = false; - - // Sets the name for the upgrade. This name will be used by the upgraded - // version of the software to apply any special "on-upgrade" commands during - // the first BeginBlock method after the upgrade is applied. It is also used - // to detect whether a software version can handle a given upgrade. If no - // upgrade handler with this name has been set in the software, it will be - // assumed that the software is out-of-date when the upgrade Time or Height is - // reached and the software will exit. - string name = 1; - - // Deprecated: Time based upgrades have been deprecated. Time based upgrade logic - // has been removed from the SDK. - // If this field is not empty, an error will be thrown. 
- google.protobuf.Timestamp time = 2 [deprecated = true, (gogoproto.stdtime) = true, (gogoproto.nullable) = false]; - - // The height at which the upgrade must be performed. - // Only used if Time is not set. - int64 height = 3; - - // Any application specific upgrade info to be included on-chain - // such as a git commit that validators could automatically upgrade to - string info = 4; - - // Deprecated: UpgradedClientState field has been deprecated. IBC upgrade logic has been - // moved to the IBC module in the sub module 02-client. - // If this field is not empty, an error will be thrown. - google.protobuf.Any upgraded_client_state = 5 - [deprecated = true, (gogoproto.moretags) = "yaml:\"upgraded_client_state\""]; -} - -// SoftwareUpgradeProposal is a gov Content type for initiating a software -// upgrade. -message SoftwareUpgradeProposal { - option (gogoproto.equal) = true; - option (gogoproto.goproto_stringer) = false; - - string title = 1; - string description = 2; - Plan plan = 3 [(gogoproto.nullable) = false]; -} - -// CancelSoftwareUpgradeProposal is a gov Content type for cancelling a software -// upgrade. -message CancelSoftwareUpgradeProposal { - option (gogoproto.equal) = true; - option (gogoproto.goproto_stringer) = false; - - string title = 1; - string description = 2; -} - -// ModuleVersion specifies a module and its consensus version. 
-// -// Since: cosmos-sdk 0.43 -message ModuleVersion { - option (gogoproto.equal) = true; - option (gogoproto.goproto_stringer) = true; - - // name of the app module - string name = 1; - - // consensus version of the app module - uint64 version = 2; -} diff --git a/third_party/proto/cosmos_proto/cosmos.proto b/third_party/proto/cosmos_proto/cosmos.proto deleted file mode 100644 index 641ae4639..000000000 --- a/third_party/proto/cosmos_proto/cosmos.proto +++ /dev/null @@ -1,18 +0,0 @@ -syntax = "proto3"; -package cosmos_proto; - -import "google/protobuf/descriptor.proto"; - -option go_package = "github.com/cosmos/cosmos-proto"; - -extend google.protobuf.MessageOptions { - // implements_interface is used to annotate interface implementations - string implements_interface = 93001; -} - -extend google.protobuf.FieldOptions { - // accepts_interface is used to annote fields that accept interfaces - string accepts_interface = 93001; - // scalar is used to define scalar types - string scalar = 93002; -} \ No newline at end of file diff --git a/third_party/proto/gogoproto/gogo.proto b/third_party/proto/gogoproto/gogo.proto deleted file mode 100644 index 49e78f99f..000000000 --- a/third_party/proto/gogoproto/gogo.proto +++ /dev/null @@ -1,145 +0,0 @@ -// Protocol Buffers for Go with Gadgets -// -// Copyright (c) 2013, The GoGo Authors. All rights reserved. -// http://github.com/gogo/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -syntax = "proto2"; -package gogoproto; - -import "google/protobuf/descriptor.proto"; - -option java_package = "com.google.protobuf"; -option java_outer_classname = "GoGoProtos"; -option go_package = "github.com/gogo/protobuf/gogoproto"; - -extend google.protobuf.EnumOptions { - optional bool goproto_enum_prefix = 62001; - optional bool goproto_enum_stringer = 62021; - optional bool enum_stringer = 62022; - optional string enum_customname = 62023; - optional bool enumdecl = 62024; -} - -extend google.protobuf.EnumValueOptions { - optional string enumvalue_customname = 66001; -} - -extend google.protobuf.FileOptions { - optional bool goproto_getters_all = 63001; - optional bool goproto_enum_prefix_all = 63002; - optional bool goproto_stringer_all = 63003; - optional bool verbose_equal_all = 63004; - optional bool face_all = 63005; - optional bool gostring_all = 63006; - optional bool populate_all = 63007; - optional bool stringer_all = 63008; - optional bool onlyone_all = 63009; - - optional bool equal_all = 63013; - optional bool description_all = 63014; - optional bool testgen_all = 63015; - optional bool benchgen_all = 63016; - optional bool marshaler_all = 63017; - 
optional bool unmarshaler_all = 63018; - optional bool stable_marshaler_all = 63019; - - optional bool sizer_all = 63020; - - optional bool goproto_enum_stringer_all = 63021; - optional bool enum_stringer_all = 63022; - - optional bool unsafe_marshaler_all = 63023; - optional bool unsafe_unmarshaler_all = 63024; - - optional bool goproto_extensions_map_all = 63025; - optional bool goproto_unrecognized_all = 63026; - optional bool gogoproto_import = 63027; - optional bool protosizer_all = 63028; - optional bool compare_all = 63029; - optional bool typedecl_all = 63030; - optional bool enumdecl_all = 63031; - - optional bool goproto_registration = 63032; - optional bool messagename_all = 63033; - - optional bool goproto_sizecache_all = 63034; - optional bool goproto_unkeyed_all = 63035; -} - -extend google.protobuf.MessageOptions { - optional bool goproto_getters = 64001; - optional bool goproto_stringer = 64003; - optional bool verbose_equal = 64004; - optional bool face = 64005; - optional bool gostring = 64006; - optional bool populate = 64007; - optional bool stringer = 67008; - optional bool onlyone = 64009; - - optional bool equal = 64013; - optional bool description = 64014; - optional bool testgen = 64015; - optional bool benchgen = 64016; - optional bool marshaler = 64017; - optional bool unmarshaler = 64018; - optional bool stable_marshaler = 64019; - - optional bool sizer = 64020; - - optional bool unsafe_marshaler = 64023; - optional bool unsafe_unmarshaler = 64024; - - optional bool goproto_extensions_map = 64025; - optional bool goproto_unrecognized = 64026; - - optional bool protosizer = 64028; - optional bool compare = 64029; - - optional bool typedecl = 64030; - - optional bool messagename = 64033; - - optional bool goproto_sizecache = 64034; - optional bool goproto_unkeyed = 64035; -} - -extend google.protobuf.FieldOptions { - optional bool nullable = 65001; - optional bool embed = 65002; - optional string customtype = 65003; - optional string 
customname = 65004; - optional string jsontag = 65005; - optional string moretags = 65006; - optional string casttype = 65007; - optional string castkey = 65008; - optional string castvalue = 65009; - - optional bool stdtime = 65010; - optional bool stdduration = 65011; - optional bool wktpointer = 65012; - - optional string castrepeated = 65013; -} diff --git a/third_party/proto/google/api/annotations.proto b/third_party/proto/google/api/annotations.proto deleted file mode 100644 index 85c361b47..000000000 --- a/third_party/proto/google/api/annotations.proto +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) 2015, Google Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.api; - -import "google/api/http.proto"; -import "google/protobuf/descriptor.proto"; - -option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; -option java_multiple_files = true; -option java_outer_classname = "AnnotationsProto"; -option java_package = "com.google.api"; -option objc_class_prefix = "GAPI"; - -extend google.protobuf.MethodOptions { - // See `HttpRule`. 
- HttpRule http = 72295728; -} diff --git a/third_party/proto/google/api/http.proto b/third_party/proto/google/api/http.proto deleted file mode 100644 index 2bd3a19bf..000000000 --- a/third_party/proto/google/api/http.proto +++ /dev/null @@ -1,318 +0,0 @@ -// Copyright 2018 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.api; - -option cc_enable_arenas = true; -option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; -option java_multiple_files = true; -option java_outer_classname = "HttpProto"; -option java_package = "com.google.api"; -option objc_class_prefix = "GAPI"; - - -// Defines the HTTP configuration for an API service. It contains a list of -// [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method -// to one or more HTTP REST API methods. -message Http { - // A list of HTTP configuration rules that apply to individual API methods. - // - // **NOTE:** All service configuration rules follow "last one wins" order. - repeated HttpRule rules = 1; - - // When set to true, URL path parmeters will be fully URI-decoded except in - // cases of single segment matches in reserved expansion, where "%2F" will be - // left encoded. - // - // The default behavior is to not decode RFC 6570 reserved characters in multi - // segment matches. 
- bool fully_decode_reserved_expansion = 2; -} - -// `HttpRule` defines the mapping of an RPC method to one or more HTTP -// REST API methods. The mapping specifies how different portions of the RPC -// request message are mapped to URL path, URL query parameters, and -// HTTP request body. The mapping is typically specified as an -// `google.api.http` annotation on the RPC method, -// see "google/api/annotations.proto" for details. -// -// The mapping consists of a field specifying the path template and -// method kind. The path template can refer to fields in the request -// message, as in the example below which describes a REST GET -// operation on a resource collection of messages: -// -// -// service Messaging { -// rpc GetMessage(GetMessageRequest) returns (Message) { -// option (google.api.http).get = "/v1/messages/{message_id}/{sub.subfield}"; -// } -// } -// message GetMessageRequest { -// message SubMessage { -// string subfield = 1; -// } -// string message_id = 1; // mapped to the URL -// SubMessage sub = 2; // `sub.subfield` is url-mapped -// } -// message Message { -// string text = 1; // content of the resource -// } -// -// The same http annotation can alternatively be expressed inside the -// `GRPC API Configuration` YAML file. -// -// http: -// rules: -// - selector: .Messaging.GetMessage -// get: /v1/messages/{message_id}/{sub.subfield} -// -// This definition enables an automatic, bidrectional mapping of HTTP -// JSON to RPC. Example: -// -// HTTP | RPC -// -----|----- -// `GET /v1/messages/123456/foo` | `GetMessage(message_id: "123456" sub: SubMessage(subfield: "foo"))` -// -// In general, not only fields but also field paths can be referenced -// from a path pattern. Fields mapped to the path pattern cannot be -// repeated and must have a primitive (non-message) type. -// -// Any fields in the request message which are not bound by the path -// pattern automatically become (optional) HTTP query -// parameters. 
Assume the following definition of the request message: -// -// -// service Messaging { -// rpc GetMessage(GetMessageRequest) returns (Message) { -// option (google.api.http).get = "/v1/messages/{message_id}"; -// } -// } -// message GetMessageRequest { -// message SubMessage { -// string subfield = 1; -// } -// string message_id = 1; // mapped to the URL -// int64 revision = 2; // becomes a parameter -// SubMessage sub = 3; // `sub.subfield` becomes a parameter -// } -// -// -// This enables a HTTP JSON to RPC mapping as below: -// -// HTTP | RPC -// -----|----- -// `GET /v1/messages/123456?revision=2&sub.subfield=foo` | `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: "foo"))` -// -// Note that fields which are mapped to HTTP parameters must have a -// primitive type or a repeated primitive type. Message types are not -// allowed. In the case of a repeated type, the parameter can be -// repeated in the URL, as in `...?param=A¶m=B`. -// -// For HTTP method kinds which allow a request body, the `body` field -// specifies the mapping. Consider a REST update method on the -// message resource collection: -// -// -// service Messaging { -// rpc UpdateMessage(UpdateMessageRequest) returns (Message) { -// option (google.api.http) = { -// put: "/v1/messages/{message_id}" -// body: "message" -// }; -// } -// } -// message UpdateMessageRequest { -// string message_id = 1; // mapped to the URL -// Message message = 2; // mapped to the body -// } -// -// -// The following HTTP JSON to RPC mapping is enabled, where the -// representation of the JSON in the request body is determined by -// protos JSON encoding: -// -// HTTP | RPC -// -----|----- -// `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" message { text: "Hi!" })` -// -// The special name `*` can be used in the body mapping to define that -// every field not bound by the path template should be mapped to the -// request body. 
This enables the following alternative definition of -// the update method: -// -// service Messaging { -// rpc UpdateMessage(Message) returns (Message) { -// option (google.api.http) = { -// put: "/v1/messages/{message_id}" -// body: "*" -// }; -// } -// } -// message Message { -// string message_id = 1; -// string text = 2; -// } -// -// -// The following HTTP JSON to RPC mapping is enabled: -// -// HTTP | RPC -// -----|----- -// `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" text: "Hi!")` -// -// Note that when using `*` in the body mapping, it is not possible to -// have HTTP parameters, as all fields not bound by the path end in -// the body. This makes this option more rarely used in practice of -// defining REST APIs. The common usage of `*` is in custom methods -// which don't use the URL at all for transferring data. -// -// It is possible to define multiple HTTP methods for one RPC by using -// the `additional_bindings` option. Example: -// -// service Messaging { -// rpc GetMessage(GetMessageRequest) returns (Message) { -// option (google.api.http) = { -// get: "/v1/messages/{message_id}" -// additional_bindings { -// get: "/v1/users/{user_id}/messages/{message_id}" -// } -// }; -// } -// } -// message GetMessageRequest { -// string message_id = 1; -// string user_id = 2; -// } -// -// -// This enables the following two alternative HTTP JSON to RPC -// mappings: -// -// HTTP | RPC -// -----|----- -// `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` -// `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: "123456")` -// -// # Rules for HTTP mapping -// -// The rules for mapping HTTP path, query parameters, and body fields -// to the request message are as follows: -// -// 1. The `body` field specifies either `*` or a field path, or is -// omitted. If omitted, it indicates there is no HTTP request body. -// 2. 
Leaf fields (recursive expansion of nested messages in the -// request) can be classified into three types: -// (a) Matched in the URL template. -// (b) Covered by body (if body is `*`, everything except (a) fields; -// else everything under the body field) -// (c) All other fields. -// 3. URL query parameters found in the HTTP request are mapped to (c) fields. -// 4. Any body sent with an HTTP request can contain only (b) fields. -// -// The syntax of the path template is as follows: -// -// Template = "/" Segments [ Verb ] ; -// Segments = Segment { "/" Segment } ; -// Segment = "*" | "**" | LITERAL | Variable ; -// Variable = "{" FieldPath [ "=" Segments ] "}" ; -// FieldPath = IDENT { "." IDENT } ; -// Verb = ":" LITERAL ; -// -// The syntax `*` matches a single path segment. The syntax `**` matches zero -// or more path segments, which must be the last part of the path except the -// `Verb`. The syntax `LITERAL` matches literal text in the path. -// -// The syntax `Variable` matches part of the URL path as specified by its -// template. A variable template must not contain other variables. If a variable -// matches a single path segment, its template may be omitted, e.g. `{var}` -// is equivalent to `{var=*}`. -// -// If a variable contains exactly one path segment, such as `"{var}"` or -// `"{var=*}"`, when such a variable is expanded into a URL path, all characters -// except `[-_.~0-9a-zA-Z]` are percent-encoded. Such variables show up in the -// Discovery Document as `{var}`. -// -// If a variable contains one or more path segments, such as `"{var=foo/*}"` -// or `"{var=**}"`, when such a variable is expanded into a URL path, all -// characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. Such variables -// show up in the Discovery Document as `{+var}`. 
-// -// NOTE: While the single segment variable matches the semantics of -// [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 -// Simple String Expansion, the multi segment variable **does not** match -// RFC 6570 Reserved Expansion. The reason is that the Reserved Expansion -// does not expand special characters like `?` and `#`, which would lead -// to invalid URLs. -// -// NOTE: the field paths in variables and in the `body` must not refer to -// repeated fields or map fields. -message HttpRule { - // Selects methods to which this rule applies. - // - // Refer to [selector][google.api.DocumentationRule.selector] for syntax details. - string selector = 1; - - // Determines the URL pattern is matched by this rules. This pattern can be - // used with any of the {get|put|post|delete|patch} methods. A custom method - // can be defined using the 'custom' field. - oneof pattern { - // Used for listing and getting information about resources. - string get = 2; - - // Used for updating a resource. - string put = 3; - - // Used for creating a resource. - string post = 4; - - // Used for deleting a resource. - string delete = 5; - - // Used for updating a resource. - string patch = 6; - - // The custom pattern is used for specifying an HTTP method that is not - // included in the `pattern` field, such as HEAD, or "*" to leave the - // HTTP method unspecified for this rule. The wild-card rule is useful - // for services that provide content to Web (HTML) clients. - CustomHttpPattern custom = 8; - } - - // The name of the request field whose value is mapped to the HTTP body, or - // `*` for mapping all fields not captured by the path pattern to the HTTP - // body. NOTE: the referred field must not be a repeated field and must be - // present at the top-level of request message type. - string body = 7; - - // Optional. The name of the response field whose value is mapped to the HTTP - // body of response. Other response fields are ignored. 
When - // not set, the response message will be used as HTTP body of response. - string response_body = 12; - - // Additional HTTP bindings for the selector. Nested bindings must - // not contain an `additional_bindings` field themselves (that is, - // the nesting may only be one level deep). - repeated HttpRule additional_bindings = 11; -} - -// A custom pattern is used for defining custom HTTP verb. -message CustomHttpPattern { - // The name of this custom HTTP verb. - string kind = 1; - - // The path matched by this custom verb. - string path = 2; -} diff --git a/third_party/proto/google/api/httpbody.proto b/third_party/proto/google/api/httpbody.proto deleted file mode 100644 index 4428515c1..000000000 --- a/third_party/proto/google/api/httpbody.proto +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright 2018 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.api; - -import "google/protobuf/any.proto"; - -option cc_enable_arenas = true; -option go_package = "google.golang.org/genproto/googleapis/api/httpbody;httpbody"; -option java_multiple_files = true; -option java_outer_classname = "HttpBodyProto"; -option java_package = "com.google.api"; -option objc_class_prefix = "GAPI"; - -// Message that represents an arbitrary HTTP body. It should only be used for -// payload formats that can't be represented as JSON, such as raw binary or -// an HTML page. 
-// -// -// This message can be used both in streaming and non-streaming API methods in -// the request as well as the response. -// -// It can be used as a top-level request field, which is convenient if one -// wants to extract parameters from either the URL or HTTP template into the -// request fields and also want access to the raw HTTP body. -// -// Example: -// -// message GetResourceRequest { -// // A unique request id. -// string request_id = 1; -// -// // The raw HTTP body is bound to this field. -// google.api.HttpBody http_body = 2; -// } -// -// service ResourceService { -// rpc GetResource(GetResourceRequest) returns (google.api.HttpBody); -// rpc UpdateResource(google.api.HttpBody) returns -// (google.protobuf.Empty); -// } -// -// Example with streaming methods: -// -// service CaldavService { -// rpc GetCalendar(stream google.api.HttpBody) -// returns (stream google.api.HttpBody); -// rpc UpdateCalendar(stream google.api.HttpBody) -// returns (stream google.api.HttpBody); -// } -// -// Use of this type only changes how the request and response bodies are -// handled, all other features will continue to work unchanged. -message HttpBody { - // The HTTP Content-Type header value specifying the content type of the body. - string content_type = 1; - - // The HTTP request/response body as raw binary. - bytes data = 2; - - // Application specific response metadata. Must be set in the first response - // for streaming APIs. - repeated google.protobuf.Any extensions = 3; -} \ No newline at end of file diff --git a/third_party/proto/google/protobuf/any.proto b/third_party/proto/google/protobuf/any.proto deleted file mode 100644 index 58b511583..000000000 --- a/third_party/proto/google/protobuf/any.proto +++ /dev/null @@ -1,164 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. 
-// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -syntax = "proto3"; - -package google.protobuf; - -import "gogoproto/gogo.proto"; - -option csharp_namespace = "Google.Protobuf.WellKnownTypes"; -option go_package = "types"; -option java_package = "com.google.protobuf"; -option java_outer_classname = "AnyProto"; -option java_multiple_files = true; -option objc_class_prefix = "GPB"; - -// `Any` contains an arbitrary serialized protocol buffer message along with a -// URL that describes the type of the serialized message. -// -// Protobuf library provides support to pack/unpack Any values in the form -// of utility functions or additional generated methods of the Any type. -// -// Example 1: Pack and unpack a message in C++. -// -// Foo foo = ...; -// Any any; -// any.PackFrom(foo); -// ... -// if (any.UnpackTo(&foo)) { -// ... -// } -// -// Example 2: Pack and unpack a message in Java. -// -// Foo foo = ...; -// Any any = Any.pack(foo); -// ... -// if (any.is(Foo.class)) { -// foo = any.unpack(Foo.class); -// } -// -// Example 3: Pack and unpack a message in Python. -// -// foo = Foo(...) -// any = Any() -// any.Pack(foo) -// ... -// if any.Is(Foo.DESCRIPTOR): -// any.Unpack(foo) -// ... -// -// Example 4: Pack and unpack a message in Go -// -// foo := &pb.Foo{...} -// any, err := ptypes.MarshalAny(foo) -// ... -// foo := &pb.Foo{} -// if err := ptypes.UnmarshalAny(any, foo); err != nil { -// ... -// } -// -// The pack methods provided by protobuf library will by default use -// 'type.googleapis.com/full.type.name' as the type URL and the unpack -// methods only use the fully qualified type name after the last '/' -// in the type URL, for example "foo.bar.com/x/y.z" will yield type -// name "y.z". -// -// -// JSON -// ==== -// The JSON representation of an `Any` value uses the regular -// representation of the deserialized, embedded message, with an -// additional field `@type` which contains the type URL. 
Example: -// -// package google.profile; -// message Person { -// string first_name = 1; -// string last_name = 2; -// } -// -// { -// "@type": "type.googleapis.com/google.profile.Person", -// "firstName": , -// "lastName": -// } -// -// If the embedded message type is well-known and has a custom JSON -// representation, that representation will be embedded adding a field -// `value` which holds the custom JSON in addition to the `@type` -// field. Example (for message [google.protobuf.Duration][]): -// -// { -// "@type": "type.googleapis.com/google.protobuf.Duration", -// "value": "1.212s" -// } -// -message Any { - // A URL/resource name that uniquely identifies the type of the serialized - // protocol buffer message. This string must contain at least - // one "/" character. The last segment of the URL's path must represent - // the fully qualified name of the type (as in - // `path/google.protobuf.Duration`). The name should be in a canonical form - // (e.g., leading "." is not accepted). - // - // In practice, teams usually precompile into the binary all types that they - // expect it to use in the context of Any. However, for URLs which use the - // scheme `http`, `https`, or no scheme, one can optionally set up a type - // server that maps type URLs to message definitions as follows: - // - // * If no scheme is provided, `https` is assumed. - // * An HTTP GET on the URL must yield a [google.protobuf.Type][] - // value in binary format, or produce an error. - // * Applications are allowed to cache lookup results based on the - // URL, or have them precompiled into a binary to avoid any - // lookup. Therefore, binary compatibility needs to be preserved - // on changes to types. (Use versioned type names to manage - // breaking changes.) - // - // Note: this functionality is not currently available in the official - // protobuf release, and it is not used for type URLs beginning with - // type.googleapis.com. 
- // - // Schemes other than `http`, `https` (or the empty scheme) might be - // used with implementation specific semantics. - // - string type_url = 1; - - // Must be a valid serialized protocol buffer of the above specified type. - bytes value = 2; - - option (gogoproto.typedecl) = false; - option (gogoproto.goproto_stringer) = false; - option (gogoproto.gostring) = false; - option (gogoproto.stringer) = false; -} - -option (gogoproto.goproto_registration) = false; diff --git a/third_party/proto/tendermint/abci/types.proto b/third_party/proto/tendermint/abci/types.proto deleted file mode 100644 index 2cbcabb29..000000000 --- a/third_party/proto/tendermint/abci/types.proto +++ /dev/null @@ -1,407 +0,0 @@ -syntax = "proto3"; -package tendermint.abci; - -option go_package = "github.com/tendermint/tendermint/abci/types"; - -// For more information on gogo.proto, see: -// https://github.com/gogo/protobuf/blob/master/extensions.md -import "tendermint/crypto/proof.proto"; -import "tendermint/types/types.proto"; -import "tendermint/crypto/keys.proto"; -import "tendermint/types/params.proto"; -import "google/protobuf/timestamp.proto"; -import "gogoproto/gogo.proto"; - -// This file is copied from http://github.com/tendermint/abci -// NOTE: When using custom types, mind the warnings. 
-// https://github.com/gogo/protobuf/blob/master/custom_types.md#warnings-and-issues - -//---------------------------------------- -// Request types - -message Request { - oneof value { - RequestEcho echo = 1; - RequestFlush flush = 2; - RequestInfo info = 3; - RequestSetOption set_option = 4; - RequestInitChain init_chain = 5; - RequestQuery query = 6; - RequestBeginBlock begin_block = 7; - RequestCheckTx check_tx = 8; - RequestDeliverTx deliver_tx = 9; - RequestEndBlock end_block = 10; - RequestCommit commit = 11; - RequestListSnapshots list_snapshots = 12; - RequestOfferSnapshot offer_snapshot = 13; - RequestLoadSnapshotChunk load_snapshot_chunk = 14; - RequestApplySnapshotChunk apply_snapshot_chunk = 15; - } -} - -message RequestEcho { - string message = 1; -} - -message RequestFlush {} - -message RequestInfo { - string version = 1; - uint64 block_version = 2; - uint64 p2p_version = 3; -} - -// nondeterministic -message RequestSetOption { - string key = 1; - string value = 2; -} - -message RequestInitChain { - google.protobuf.Timestamp time = 1 - [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; - string chain_id = 2; - ConsensusParams consensus_params = 3; - repeated ValidatorUpdate validators = 4 [(gogoproto.nullable) = false]; - bytes app_state_bytes = 5; - int64 initial_height = 6; -} - -message RequestQuery { - bytes data = 1; - string path = 2; - int64 height = 3; - bool prove = 4; -} - -message RequestBeginBlock { - bytes hash = 1; - tendermint.types.Header header = 2 [(gogoproto.nullable) = false]; - LastCommitInfo last_commit_info = 3 [(gogoproto.nullable) = false]; - repeated Evidence byzantine_validators = 4 [(gogoproto.nullable) = false]; -} - -enum CheckTxType { - NEW = 0 [(gogoproto.enumvalue_customname) = "New"]; - RECHECK = 1 [(gogoproto.enumvalue_customname) = "Recheck"]; -} - -message RequestCheckTx { - bytes tx = 1; - CheckTxType type = 2; -} - -message RequestDeliverTx { - bytes tx = 1; -} - -message RequestEndBlock { - int64 
height = 1; -} - -message RequestCommit {} - -// lists available snapshots -message RequestListSnapshots { -} - -// offers a snapshot to the application -message RequestOfferSnapshot { - Snapshot snapshot = 1; // snapshot offered by peers - bytes app_hash = 2; // light client-verified app hash for snapshot height -} - -// loads a snapshot chunk -message RequestLoadSnapshotChunk { - uint64 height = 1; - uint32 format = 2; - uint32 chunk = 3; -} - -// Applies a snapshot chunk -message RequestApplySnapshotChunk { - uint32 index = 1; - bytes chunk = 2; - string sender = 3; -} - -//---------------------------------------- -// Response types - -message Response { - oneof value { - ResponseException exception = 1; - ResponseEcho echo = 2; - ResponseFlush flush = 3; - ResponseInfo info = 4; - ResponseSetOption set_option = 5; - ResponseInitChain init_chain = 6; - ResponseQuery query = 7; - ResponseBeginBlock begin_block = 8; - ResponseCheckTx check_tx = 9; - ResponseDeliverTx deliver_tx = 10; - ResponseEndBlock end_block = 11; - ResponseCommit commit = 12; - ResponseListSnapshots list_snapshots = 13; - ResponseOfferSnapshot offer_snapshot = 14; - ResponseLoadSnapshotChunk load_snapshot_chunk = 15; - ResponseApplySnapshotChunk apply_snapshot_chunk = 16; - } -} - -// nondeterministic -message ResponseException { - string error = 1; -} - -message ResponseEcho { - string message = 1; -} - -message ResponseFlush {} - -message ResponseInfo { - string data = 1; - - string version = 2; - uint64 app_version = 3; - - int64 last_block_height = 4; - bytes last_block_app_hash = 5; -} - -// nondeterministic -message ResponseSetOption { - uint32 code = 1; - // bytes data = 2; - string log = 3; - string info = 4; -} - -message ResponseInitChain { - ConsensusParams consensus_params = 1; - repeated ValidatorUpdate validators = 2 [(gogoproto.nullable) = false]; - bytes app_hash = 3; -} - -message ResponseQuery { - uint32 code = 1; - // bytes data = 2; // use "value" instead. 
- string log = 3; // nondeterministic - string info = 4; // nondeterministic - int64 index = 5; - bytes key = 6; - bytes value = 7; - tendermint.crypto.ProofOps proof_ops = 8; - int64 height = 9; - string codespace = 10; -} - -message ResponseBeginBlock { - repeated Event events = 1 - [(gogoproto.nullable) = false, (gogoproto.jsontag) = "events,omitempty"]; -} - -message ResponseCheckTx { - uint32 code = 1; - bytes data = 2; - string log = 3; // nondeterministic - string info = 4; // nondeterministic - int64 gas_wanted = 5 [json_name = "gas_wanted"]; - int64 gas_used = 6 [json_name = "gas_used"]; - repeated Event events = 7 - [(gogoproto.nullable) = false, (gogoproto.jsontag) = "events,omitempty"]; - string codespace = 8; -} - -message ResponseDeliverTx { - uint32 code = 1; - bytes data = 2; - string log = 3; // nondeterministic - string info = 4; // nondeterministic - int64 gas_wanted = 5 [json_name = "gas_wanted"]; - int64 gas_used = 6 [json_name = "gas_used"]; - repeated Event events = 7 - [(gogoproto.nullable) = false, (gogoproto.jsontag) = "events,omitempty"]; - string codespace = 8; -} - -message ResponseEndBlock { - repeated ValidatorUpdate validator_updates = 1 - [(gogoproto.nullable) = false]; - ConsensusParams consensus_param_updates = 2; - repeated Event events = 3 - [(gogoproto.nullable) = false, (gogoproto.jsontag) = "events,omitempty"]; -} - -message ResponseCommit { - // reserve 1 - bytes data = 2; - int64 retain_height = 3; -} - -message ResponseListSnapshots { - repeated Snapshot snapshots = 1; -} - -message ResponseOfferSnapshot { - Result result = 1; - - enum Result { - UNKNOWN = 0; // Unknown result, abort all snapshot restoration - ACCEPT = 1; // Snapshot accepted, apply chunks - ABORT = 2; // Abort all snapshot restoration - REJECT = 3; // Reject this specific snapshot, try others - REJECT_FORMAT = 4; // Reject all snapshots of this format, try others - REJECT_SENDER = 5; // Reject all snapshots from the sender(s), try others - } -} - -message 
ResponseLoadSnapshotChunk { - bytes chunk = 1; -} - -message ResponseApplySnapshotChunk { - Result result = 1; - repeated uint32 refetch_chunks = 2; // Chunks to refetch and reapply - repeated string reject_senders = 3; // Chunk senders to reject and ban - - enum Result { - UNKNOWN = 0; // Unknown result, abort all snapshot restoration - ACCEPT = 1; // Chunk successfully accepted - ABORT = 2; // Abort all snapshot restoration - RETRY = 3; // Retry chunk (combine with refetch and reject) - RETRY_SNAPSHOT = 4; // Retry snapshot (combine with refetch and reject) - REJECT_SNAPSHOT = 5; // Reject this snapshot, try others - } -} - -//---------------------------------------- -// Misc. - -// ConsensusParams contains all consensus-relevant parameters -// that can be adjusted by the abci app -message ConsensusParams { - BlockParams block = 1; - tendermint.types.EvidenceParams evidence = 2; - tendermint.types.ValidatorParams validator = 3; - tendermint.types.VersionParams version = 4; -} - -// BlockParams contains limits on the block size. -message BlockParams { - // Note: must be greater than 0 - int64 max_bytes = 1; - // Note: must be greater or equal to -1 - int64 max_gas = 2; -} - -message LastCommitInfo { - int32 round = 1; - repeated VoteInfo votes = 2 [(gogoproto.nullable) = false]; -} - -// Event allows application developers to attach additional information to -// ResponseBeginBlock, ResponseEndBlock, ResponseCheckTx and ResponseDeliverTx. -// Later, transactions may be queried using these events. -message Event { - string type = 1; - repeated EventAttribute attributes = 2 [ - (gogoproto.nullable) = false, - (gogoproto.jsontag) = "attributes,omitempty" - ]; -} - -// EventAttribute is a single key-value pair, associated with an event. -message EventAttribute { - bytes key = 1; - bytes value = 2; - bool index = 3; // nondeterministic -} - -// TxResult contains results of executing the transaction. -// -// One usage is indexing transaction results. 
-message TxResult { - int64 height = 1; - uint32 index = 2; - bytes tx = 3; - ResponseDeliverTx result = 4 [(gogoproto.nullable) = false]; -} - -//---------------------------------------- -// Blockchain Types - -// Validator -message Validator { - bytes address = 1; // The first 20 bytes of SHA256(public key) - // PubKey pub_key = 2 [(gogoproto.nullable)=false]; - int64 power = 3; // The voting power -} - -// ValidatorUpdate -message ValidatorUpdate { - tendermint.crypto.PublicKey pub_key = 1 [(gogoproto.nullable) = false]; - int64 power = 2; -} - -// VoteInfo -message VoteInfo { - Validator validator = 1 [(gogoproto.nullable) = false]; - bool signed_last_block = 2; -} - -enum EvidenceType { - UNKNOWN = 0; - DUPLICATE_VOTE = 1; - LIGHT_CLIENT_ATTACK = 2; -} - -message Evidence { - EvidenceType type = 1; - // The offending validator - Validator validator = 2 [(gogoproto.nullable) = false]; - // The height when the offense occurred - int64 height = 3; - // The corresponding time where the offense occurred - google.protobuf.Timestamp time = 4 [ - (gogoproto.nullable) = false, - (gogoproto.stdtime) = true - ]; - // Total voting power of the validator set in case the ABCI application does - // not store historical validators. 
- // https://github.com/tendermint/tendermint/issues/4581 - int64 total_voting_power = 5; -} - -//---------------------------------------- -// State Sync Types - -message Snapshot { - uint64 height = 1; // The height at which the snapshot was taken - uint32 format = 2; // The application-specific snapshot format - uint32 chunks = 3; // Number of chunks in the snapshot - bytes hash = 4; // Arbitrary snapshot hash, equal only if identical - bytes metadata = 5; // Arbitrary application metadata -} - -//---------------------------------------- -// Service Definition - -service ABCIApplication { - rpc Echo(RequestEcho) returns (ResponseEcho); - rpc Flush(RequestFlush) returns (ResponseFlush); - rpc Info(RequestInfo) returns (ResponseInfo); - rpc SetOption(RequestSetOption) returns (ResponseSetOption); - rpc DeliverTx(RequestDeliverTx) returns (ResponseDeliverTx); - rpc CheckTx(RequestCheckTx) returns (ResponseCheckTx); - rpc Query(RequestQuery) returns (ResponseQuery); - rpc Commit(RequestCommit) returns (ResponseCommit); - rpc InitChain(RequestInitChain) returns (ResponseInitChain); - rpc BeginBlock(RequestBeginBlock) returns (ResponseBeginBlock); - rpc EndBlock(RequestEndBlock) returns (ResponseEndBlock); - rpc ListSnapshots(RequestListSnapshots) returns (ResponseListSnapshots); - rpc OfferSnapshot(RequestOfferSnapshot) returns (ResponseOfferSnapshot); - rpc LoadSnapshotChunk(RequestLoadSnapshotChunk) returns (ResponseLoadSnapshotChunk); - rpc ApplySnapshotChunk(RequestApplySnapshotChunk) returns (ResponseApplySnapshotChunk); -} diff --git a/third_party/proto/tendermint/crypto/keys.proto b/third_party/proto/tendermint/crypto/keys.proto deleted file mode 100644 index 16fd7adf3..000000000 --- a/third_party/proto/tendermint/crypto/keys.proto +++ /dev/null @@ -1,17 +0,0 @@ -syntax = "proto3"; -package tendermint.crypto; - -option go_package = "github.com/tendermint/tendermint/proto/tendermint/crypto"; - -import "gogoproto/gogo.proto"; - -// PublicKey defines the keys 
available for use with Tendermint Validators -message PublicKey { - option (gogoproto.compare) = true; - option (gogoproto.equal) = true; - - oneof sum { - bytes ed25519 = 1; - bytes secp256k1 = 2; - } -} diff --git a/third_party/proto/tendermint/crypto/proof.proto b/third_party/proto/tendermint/crypto/proof.proto deleted file mode 100644 index 975df7685..000000000 --- a/third_party/proto/tendermint/crypto/proof.proto +++ /dev/null @@ -1,41 +0,0 @@ -syntax = "proto3"; -package tendermint.crypto; - -option go_package = "github.com/tendermint/tendermint/proto/tendermint/crypto"; - -import "gogoproto/gogo.proto"; - -message Proof { - int64 total = 1; - int64 index = 2; - bytes leaf_hash = 3; - repeated bytes aunts = 4; -} - -message ValueOp { - // Encoded in ProofOp.Key. - bytes key = 1; - - // To encode in ProofOp.Data - Proof proof = 2; -} - -message DominoOp { - string key = 1; - string input = 2; - string output = 3; -} - -// ProofOp defines an operation used for calculating Merkle root -// The data could be arbitrary format, providing nessecary data -// for example neighbouring node hash -message ProofOp { - string type = 1; - bytes key = 2; - bytes data = 3; -} - -// ProofOps is Merkle proof defined by the list of ProofOps -message ProofOps { - repeated ProofOp ops = 1 [(gogoproto.nullable) = false]; -} diff --git a/third_party/proto/tendermint/libs/bits/types.proto b/third_party/proto/tendermint/libs/bits/types.proto deleted file mode 100644 index 3111d113a..000000000 --- a/third_party/proto/tendermint/libs/bits/types.proto +++ /dev/null @@ -1,9 +0,0 @@ -syntax = "proto3"; -package tendermint.libs.bits; - -option go_package = "github.com/tendermint/tendermint/proto/tendermint/libs/bits"; - -message BitArray { - int64 bits = 1; - repeated uint64 elems = 2; -} diff --git a/third_party/proto/tendermint/p2p/types.proto b/third_party/proto/tendermint/p2p/types.proto deleted file mode 100644 index 0d42ea400..000000000 --- 
a/third_party/proto/tendermint/p2p/types.proto +++ /dev/null @@ -1,34 +0,0 @@ -syntax = "proto3"; -package tendermint.p2p; - -option go_package = "github.com/tendermint/tendermint/proto/tendermint/p2p"; - -import "gogoproto/gogo.proto"; - -message NetAddress { - string id = 1 [(gogoproto.customname) = "ID"]; - string ip = 2 [(gogoproto.customname) = "IP"]; - uint32 port = 3; -} - -message ProtocolVersion { - uint64 p2p = 1 [(gogoproto.customname) = "P2P"]; - uint64 block = 2; - uint64 app = 3; -} - -message DefaultNodeInfo { - ProtocolVersion protocol_version = 1 [(gogoproto.nullable) = false]; - string default_node_id = 2 [(gogoproto.customname) = "DefaultNodeID"]; - string listen_addr = 3; - string network = 4; - string version = 5; - bytes channels = 6; - string moniker = 7; - DefaultNodeInfoOther other = 8 [(gogoproto.nullable) = false]; -} - -message DefaultNodeInfoOther { - string tx_index = 1; - string rpc_address = 2 [(gogoproto.customname) = "RPCAddress"]; -} diff --git a/third_party/proto/tendermint/types/block.proto b/third_party/proto/tendermint/types/block.proto deleted file mode 100644 index 84e9bb15d..000000000 --- a/third_party/proto/tendermint/types/block.proto +++ /dev/null @@ -1,15 +0,0 @@ -syntax = "proto3"; -package tendermint.types; - -option go_package = "github.com/tendermint/tendermint/proto/tendermint/types"; - -import "gogoproto/gogo.proto"; -import "tendermint/types/types.proto"; -import "tendermint/types/evidence.proto"; - -message Block { - Header header = 1 [(gogoproto.nullable) = false]; - Data data = 2 [(gogoproto.nullable) = false]; - tendermint.types.EvidenceList evidence = 3 [(gogoproto.nullable) = false]; - Commit last_commit = 4; -} diff --git a/third_party/proto/tendermint/types/evidence.proto b/third_party/proto/tendermint/types/evidence.proto deleted file mode 100644 index 3b234571b..000000000 --- a/third_party/proto/tendermint/types/evidence.proto +++ /dev/null @@ -1,38 +0,0 @@ -syntax = "proto3"; -package tendermint.types; 
- -option go_package = "github.com/tendermint/tendermint/proto/tendermint/types"; - -import "gogoproto/gogo.proto"; -import "google/protobuf/timestamp.proto"; -import "tendermint/types/types.proto"; -import "tendermint/types/validator.proto"; - -message Evidence { - oneof sum { - DuplicateVoteEvidence duplicate_vote_evidence = 1; - LightClientAttackEvidence light_client_attack_evidence = 2; - } -} - -// DuplicateVoteEvidence contains evidence of a validator signed two conflicting votes. -message DuplicateVoteEvidence { - tendermint.types.Vote vote_a = 1; - tendermint.types.Vote vote_b = 2; - int64 total_voting_power = 3; - int64 validator_power = 4; - google.protobuf.Timestamp timestamp = 5 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; -} - -// LightClientAttackEvidence contains evidence of a set of validators attempting to mislead a light client. -message LightClientAttackEvidence { - tendermint.types.LightBlock conflicting_block = 1; - int64 common_height = 2; - repeated tendermint.types.Validator byzantine_validators = 3; - int64 total_voting_power = 4; - google.protobuf.Timestamp timestamp = 5 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; -} - -message EvidenceList { - repeated Evidence evidence = 1 [(gogoproto.nullable) = false]; -} diff --git a/third_party/proto/tendermint/types/params.proto b/third_party/proto/tendermint/types/params.proto deleted file mode 100644 index 0de7d846f..000000000 --- a/third_party/proto/tendermint/types/params.proto +++ /dev/null @@ -1,80 +0,0 @@ -syntax = "proto3"; -package tendermint.types; - -option go_package = "github.com/tendermint/tendermint/proto/tendermint/types"; - -import "gogoproto/gogo.proto"; -import "google/protobuf/duration.proto"; - -option (gogoproto.equal_all) = true; - -// ConsensusParams contains consensus critical parameters that determine the -// validity of blocks. 
-message ConsensusParams { - BlockParams block = 1 [(gogoproto.nullable) = false]; - EvidenceParams evidence = 2 [(gogoproto.nullable) = false]; - ValidatorParams validator = 3 [(gogoproto.nullable) = false]; - VersionParams version = 4 [(gogoproto.nullable) = false]; -} - -// BlockParams contains limits on the block size. -message BlockParams { - // Max block size, in bytes. - // Note: must be greater than 0 - int64 max_bytes = 1; - // Max gas per block. - // Note: must be greater or equal to -1 - int64 max_gas = 2; - // Minimum time increment between consecutive blocks (in milliseconds) If the - // block header timestamp is ahead of the system clock, decrease this value. - // - // Not exposed to the application. - int64 time_iota_ms = 3; -} - -// EvidenceParams determine how we handle evidence of malfeasance. -message EvidenceParams { - // Max age of evidence, in blocks. - // - // The basic formula for calculating this is: MaxAgeDuration / {average block - // time}. - int64 max_age_num_blocks = 1; - - // Max age of evidence, in time. - // - // It should correspond with an app's "unbonding period" or other similar - // mechanism for handling [Nothing-At-Stake - // attacks](https://github.com/ethereum/wiki/wiki/Proof-of-Stake-FAQ#what-is-the-nothing-at-stake-problem-and-how-can-it-be-fixed). - google.protobuf.Duration max_age_duration = 2 - [(gogoproto.nullable) = false, (gogoproto.stdduration) = true]; - - // This sets the maximum size of total evidence in bytes that can be committed in a single block. - // and should fall comfortably under the max block bytes. - // Default is 1048576 or 1MB - int64 max_bytes = 3; -} - -// ValidatorParams restrict the public key types validators can use. -// NOTE: uses ABCI pubkey naming, not Amino names. -message ValidatorParams { - option (gogoproto.populate) = true; - option (gogoproto.equal) = true; - - repeated string pub_key_types = 1; -} - -// VersionParams contains the ABCI application version. 
-message VersionParams { - option (gogoproto.populate) = true; - option (gogoproto.equal) = true; - - uint64 app_version = 1; -} - -// HashedParams is a subset of ConsensusParams. -// -// It is hashed into the Header.ConsensusHash. -message HashedParams { - int64 block_max_bytes = 1; - int64 block_max_gas = 2; -} diff --git a/third_party/proto/tendermint/types/types.proto b/third_party/proto/tendermint/types/types.proto deleted file mode 100644 index 7f7ea74ca..000000000 --- a/third_party/proto/tendermint/types/types.proto +++ /dev/null @@ -1,157 +0,0 @@ -syntax = "proto3"; -package tendermint.types; - -option go_package = "github.com/tendermint/tendermint/proto/tendermint/types"; - -import "gogoproto/gogo.proto"; -import "google/protobuf/timestamp.proto"; -import "tendermint/crypto/proof.proto"; -import "tendermint/version/types.proto"; -import "tendermint/types/validator.proto"; - -// BlockIdFlag indicates which BlcokID the signature is for -enum BlockIDFlag { - option (gogoproto.goproto_enum_stringer) = true; - option (gogoproto.goproto_enum_prefix) = false; - - BLOCK_ID_FLAG_UNKNOWN = 0 [(gogoproto.enumvalue_customname) = "BlockIDFlagUnknown"]; - BLOCK_ID_FLAG_ABSENT = 1 [(gogoproto.enumvalue_customname) = "BlockIDFlagAbsent"]; - BLOCK_ID_FLAG_COMMIT = 2 [(gogoproto.enumvalue_customname) = "BlockIDFlagCommit"]; - BLOCK_ID_FLAG_NIL = 3 [(gogoproto.enumvalue_customname) = "BlockIDFlagNil"]; -} - -// SignedMsgType is a type of signed message in the consensus. 
-enum SignedMsgType { - option (gogoproto.goproto_enum_stringer) = true; - option (gogoproto.goproto_enum_prefix) = false; - - SIGNED_MSG_TYPE_UNKNOWN = 0 [(gogoproto.enumvalue_customname) = "UnknownType"]; - // Votes - SIGNED_MSG_TYPE_PREVOTE = 1 [(gogoproto.enumvalue_customname) = "PrevoteType"]; - SIGNED_MSG_TYPE_PRECOMMIT = 2 [(gogoproto.enumvalue_customname) = "PrecommitType"]; - - // Proposals - SIGNED_MSG_TYPE_PROPOSAL = 32 [(gogoproto.enumvalue_customname) = "ProposalType"]; -} - -// PartsetHeader -message PartSetHeader { - uint32 total = 1; - bytes hash = 2; -} - -message Part { - uint32 index = 1; - bytes bytes = 2; - tendermint.crypto.Proof proof = 3 [(gogoproto.nullable) = false]; -} - -// BlockID -message BlockID { - bytes hash = 1; - PartSetHeader part_set_header = 2 [(gogoproto.nullable) = false]; -} - -// -------------------------------- - -// Header defines the structure of a Tendermint block header. -message Header { - // basic block info - tendermint.version.Consensus version = 1 [(gogoproto.nullable) = false]; - string chain_id = 2 [(gogoproto.customname) = "ChainID"]; - int64 height = 3; - google.protobuf.Timestamp time = 4 [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; - - // prev block info - BlockID last_block_id = 5 [(gogoproto.nullable) = false]; - - // hashes of block data - bytes last_commit_hash = 6; // commit from validators from the last block - bytes data_hash = 7; // transactions - - // hashes from the app output from the prev block - bytes validators_hash = 8; // validators for the current block - bytes next_validators_hash = 9; // validators for the next block - bytes consensus_hash = 10; // consensus params for current block - bytes app_hash = 11; // state after txs from the previous block - bytes last_results_hash = 12; // root hash of all results from the txs from the previous block - - // consensus info - bytes evidence_hash = 13; // evidence included in the block - bytes proposer_address = 14; // original 
proposer of the block -} - -// Data contains the set of transactions included in the block -message Data { - // Txs that will be applied by state @ block.Height+1. - // NOTE: not all txs here are valid. We're just agreeing on the order first. - // This means that block.AppHash does not include these txs. - repeated bytes txs = 1; -} - -// Vote represents a prevote, precommit, or commit vote from validators for -// consensus. -message Vote { - SignedMsgType type = 1; - int64 height = 2; - int32 round = 3; - BlockID block_id = 4 - [(gogoproto.nullable) = false, (gogoproto.customname) = "BlockID"]; // zero if vote is nil. - google.protobuf.Timestamp timestamp = 5 - [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; - bytes validator_address = 6; - int32 validator_index = 7; - bytes signature = 8; -} - -// Commit contains the evidence that a block was committed by a set of validators. -message Commit { - int64 height = 1; - int32 round = 2; - BlockID block_id = 3 [(gogoproto.nullable) = false, (gogoproto.customname) = "BlockID"]; - repeated CommitSig signatures = 4 [(gogoproto.nullable) = false]; -} - -// CommitSig is a part of the Vote included in a Commit. 
-message CommitSig { - BlockIDFlag block_id_flag = 1; - bytes validator_address = 2; - google.protobuf.Timestamp timestamp = 3 - [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; - bytes signature = 4; -} - -message Proposal { - SignedMsgType type = 1; - int64 height = 2; - int32 round = 3; - int32 pol_round = 4; - BlockID block_id = 5 [(gogoproto.customname) = "BlockID", (gogoproto.nullable) = false]; - google.protobuf.Timestamp timestamp = 6 - [(gogoproto.nullable) = false, (gogoproto.stdtime) = true]; - bytes signature = 7; -} - -message SignedHeader { - Header header = 1; - Commit commit = 2; -} - -message LightBlock { - SignedHeader signed_header = 1; - tendermint.types.ValidatorSet validator_set = 2; -} - -message BlockMeta { - BlockID block_id = 1 [(gogoproto.customname) = "BlockID", (gogoproto.nullable) = false]; - int64 block_size = 2; - Header header = 3 [(gogoproto.nullable) = false]; - int64 num_txs = 4; -} - -// TxProof represents a Merkle proof of the presence of a transaction in the Merkle tree. 
-message TxProof { - bytes root_hash = 1; - bytes data = 2; - tendermint.crypto.Proof proof = 3; -} diff --git a/third_party/proto/tendermint/types/validator.proto b/third_party/proto/tendermint/types/validator.proto deleted file mode 100644 index 49860b96d..000000000 --- a/third_party/proto/tendermint/types/validator.proto +++ /dev/null @@ -1,25 +0,0 @@ -syntax = "proto3"; -package tendermint.types; - -option go_package = "github.com/tendermint/tendermint/proto/tendermint/types"; - -import "gogoproto/gogo.proto"; -import "tendermint/crypto/keys.proto"; - -message ValidatorSet { - repeated Validator validators = 1; - Validator proposer = 2; - int64 total_voting_power = 3; -} - -message Validator { - bytes address = 1; - tendermint.crypto.PublicKey pub_key = 2 [(gogoproto.nullable) = false]; - int64 voting_power = 3; - int64 proposer_priority = 4; -} - -message SimpleValidator { - tendermint.crypto.PublicKey pub_key = 1; - int64 voting_power = 2; -} diff --git a/third_party/proto/tendermint/version/types.proto b/third_party/proto/tendermint/version/types.proto deleted file mode 100644 index 6061868bd..000000000 --- a/third_party/proto/tendermint/version/types.proto +++ /dev/null @@ -1,24 +0,0 @@ -syntax = "proto3"; -package tendermint.version; - -option go_package = "github.com/tendermint/tendermint/proto/tendermint/version"; - -import "gogoproto/gogo.proto"; - -// App includes the protocol and software version for the application. -// This information is included in ResponseInfo. The App.Protocol can be -// updated in ResponseEndBlock. -message App { - uint64 protocol = 1; - string software = 2; -} - -// Consensus captures the consensus rules for processing a block in the blockchain, -// including all blockchain data structures and the rules of the application's -// state transition machine. 
-message Consensus { - option (gogoproto.equal) = true; - - uint64 block = 1; - uint64 app = 2; -} diff --git a/tools/tools.go b/tools/tools.go new file mode 100644 index 000000000..c2f88d474 --- /dev/null +++ b/tools/tools.go @@ -0,0 +1,15 @@ +//go:build tools + +package tools + +import ( + _ "github.com/bufbuild/buf/cmd/buf" + _ "github.com/cosmos/cosmos-proto/cmd/protoc-gen-go-pulsar" + _ "github.com/cosmos/gogoproto/protoc-gen-gocosmos" + _ "github.com/golang/protobuf/protoc-gen-go" + _ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway" + _ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger" + _ "github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2" + _ "google.golang.org/grpc/cmd/protoc-gen-go-grpc" + _ "google.golang.org/protobuf/cmd/protoc-gen-go" +) diff --git a/ts-client/client.ts b/ts-client/client.ts new file mode 100755 index 000000000..da4df98e4 --- /dev/null +++ b/ts-client/client.ts @@ -0,0 +1,171 @@ +/// +import { + GeneratedType, + OfflineSigner, + EncodeObject, + Registry, +} from "@cosmjs/proto-signing"; +import { StdFee } from "@cosmjs/launchpad"; +import { SigningStargateClient } from "@cosmjs/stargate"; +import { Env } from "./env"; +import { UnionToIntersection, Return, Constructor } from "./helpers"; +import { Module } from "./modules"; +import { EventEmitter } from "events"; +import { ChainInfo } from "@keplr-wallet/types"; + +const defaultFee = { + amount: [], + gas: "200000", +}; + +export class IgniteClient extends EventEmitter { + static plugins: Module[] = []; + env: Env; + signer?: OfflineSigner; + registry: Array<[string, GeneratedType]> = []; + static plugin(plugin: T) { + const currentPlugins = this.plugins; + + class AugmentedClient extends this { + static plugins = currentPlugins.concat(plugin); + } + + if (Array.isArray(plugin)) { + type Extension = UnionToIntersection['module']> + return AugmentedClient as typeof IgniteClient & Constructor; + } + + type Extension = Return['module'] + return 
AugmentedClient as typeof IgniteClient & Constructor; + } + + async signAndBroadcast(msgs: EncodeObject[], fee: StdFee, memo: string) { + if (this.signer) { + const { address } = (await this.signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(this.env.rpcURL, this.signer, { registry: new Registry(this.registry), prefix: this.env.prefix }); + return await signingClient.signAndBroadcast(address, msgs, fee ? fee : defaultFee, memo) + } else { + throw new Error(" Signer is not present."); + } + } + + constructor(env: Env, signer?: OfflineSigner) { + super(); + this.env = env; + this.setMaxListeners(0); + this.signer = signer; + const classConstructor = this.constructor as typeof IgniteClient; + classConstructor.plugins.forEach(plugin => { + const pluginInstance = plugin(this); + Object.assign(this, pluginInstance.module) + if (this.registry) { + this.registry = this.registry.concat(pluginInstance.registry) + } + }); + } + useSigner(signer: OfflineSigner) { + this.signer = signer; + this.emit("signer-changed", this.signer); + } + removeSigner() { + this.signer = undefined; + this.emit("signer-changed", this.signer); + } + async useKeplr(keplrChainInfo: Partial = {}) { + // Using queryClients directly because BaseClient has no knowledge of the modules at this stage + try { + const queryClient = ( + await import("./cosmos.base.tendermint.v1beta1/module") + ).queryClient; + const bankQueryClient = (await import("./cosmos.bank.v1beta1/module")) + .queryClient; + + const stakingQueryClient = (await import("./cosmos.staking.v1beta1/module")).queryClient; + const stakingqc = stakingQueryClient({ addr: this.env.apiURL }); + const staking = await (await stakingqc.queryParams()).data; + + const qc = queryClient({ addr: this.env.apiURL }); + const node_info = await (await qc.serviceGetNodeInfo()).data; + const chainId = node_info.default_node_info?.network ?? 
""; + const chainName = chainId?.toUpperCase() + " Network"; + const bankqc = bankQueryClient({ addr: this.env.apiURL }); + const tokens = await (await bankqc.queryTotalSupply()).data; + const addrPrefix = this.env.prefix ?? "cosmos"; + const rpc = this.env.rpcURL; + const rest = this.env.apiURL; + + let bip44 = { + coinType: 118, + }; + + let bech32Config = { + bech32PrefixAccAddr: addrPrefix, + bech32PrefixAccPub: addrPrefix + "pub", + bech32PrefixValAddr: addrPrefix + "valoper", + bech32PrefixValPub: addrPrefix + "valoperpub", + bech32PrefixConsAddr: addrPrefix + "valcons", + bech32PrefixConsPub: addrPrefix + "valconspub", + }; + + let currencies = + tokens.supply?.map((x) => { + const y = { + coinDenom: x.denom?.toUpperCase() ?? "", + coinMinimalDenom: x.denom ?? "", + coinDecimals: 0, + }; + return y; + }) ?? []; + + + let stakeCurrency = { + coinDenom: staking.params?.bond_denom?.toUpperCase() ?? "", + coinMinimalDenom: staking.params?.bond_denom ?? "", + coinDecimals: 0, + }; + + let feeCurrencies = + tokens.supply?.map((x) => { + const y = { + coinDenom: x.denom?.toUpperCase() ?? "", + coinMinimalDenom: x.denom ?? "", + coinDecimals: 0, + }; + return y; + }) ?? []; + + let coinType = 118; + + if (chainId) { + const suggestOptions: ChainInfo = { + chainId, + chainName, + rpc, + rest, + stakeCurrency, + bip44, + bech32Config, + currencies, + feeCurrencies, + coinType, + ...keplrChainInfo, + }; + await window.keplr.experimentalSuggestChain(suggestOptions); + + window.keplr.defaultOptions = { + sign: { + preferNoSetFee: true, + preferNoSetMemo: true, + }, + }; + } + await window.keplr.enable(chainId); + this.signer = window.keplr.getOfflineSigner(chainId); + this.emit("signer-changed", this.signer); + } catch (e) { + throw new Error( + "Could not load tendermint, staking and bank modules. 
Please ensure your client loads them to use useKeplr()" + ); + } + } +} \ No newline at end of file diff --git a/ts-client/cosmos.auth.v1beta1/index.ts b/ts-client/cosmos.auth.v1beta1/index.ts new file mode 100755 index 000000000..c9dfa159e --- /dev/null +++ b/ts-client/cosmos.auth.v1beta1/index.ts @@ -0,0 +1,6 @@ +import Module from './module'; +import { txClient, queryClient, registry } from './module'; +import { msgTypes } from './registry'; + +export * from "./types"; +export { Module, msgTypes, txClient, queryClient, registry }; diff --git a/ts-client/cosmos.auth.v1beta1/module.ts b/ts-client/cosmos.auth.v1beta1/module.ts new file mode 100755 index 000000000..d4dcc0925 --- /dev/null +++ b/ts-client/cosmos.auth.v1beta1/module.ts @@ -0,0 +1,104 @@ +// Generated by Ignite ignite.com/cli + +import { StdFee } from "@cosmjs/launchpad"; +import { SigningStargateClient, DeliverTxResponse } from "@cosmjs/stargate"; +import { EncodeObject, GeneratedType, OfflineSigner, Registry } from "@cosmjs/proto-signing"; +import { msgTypes } from './registry'; +import { IgniteClient } from "../client" +import { MissingWalletError } from "../helpers" +import { Api } from "./rest"; + +import { BaseAccount as typeBaseAccount} from "./types" +import { ModuleAccount as typeModuleAccount} from "./types" +import { ModuleCredential as typeModuleCredential} from "./types" +import { Params as typeParams} from "./types" + +export { }; + + + +export const registry = new Registry(msgTypes); + +type Field = { + name: string; + type: unknown; +} +function getStructure(template) { + const structure: {fields: Field[]} = { fields: [] } + for (let [key, value] of Object.entries(template)) { + let field = { name: key, type: typeof value } + structure.fields.push(field) + } + return structure +} +const defaultFee = { + amount: [], + gas: "200000", +}; + +interface TxClientOptions { + addr: string + prefix: string + signer?: OfflineSigner +} + +export const txClient = ({ signer, prefix, addr }: 
TxClientOptions = { addr: "http://localhost:26657", prefix: "cosmos" }) => { + + return { + + + } +}; + +interface QueryClientOptions { + addr: string +} + +export const queryClient = ({ addr: addr }: QueryClientOptions = { addr: "http://localhost:1317" }) => { + return new Api({ baseURL: addr }); +}; + +class SDKModule { + public query: ReturnType; + public tx: ReturnType; + public structure: Record; + public registry: Array<[string, GeneratedType]> = []; + + constructor(client: IgniteClient) { + + this.query = queryClient({ addr: client.env.apiURL }); + this.updateTX(client); + this.structure = { + BaseAccount: getStructure(typeBaseAccount.fromPartial({})), + ModuleAccount: getStructure(typeModuleAccount.fromPartial({})), + ModuleCredential: getStructure(typeModuleCredential.fromPartial({})), + Params: getStructure(typeParams.fromPartial({})), + + }; + client.on('signer-changed',(signer) => { + this.updateTX(client); + }) + } + updateTX(client: IgniteClient) { + const methods = txClient({ + signer: client.signer, + addr: client.env.rpcURL, + prefix: client.env.prefix ?? 
"cosmos", + }) + + this.tx = methods; + for (let m in methods) { + this.tx[m] = methods[m].bind(this.tx); + } + } +}; + +const Module = (test: IgniteClient) => { + return { + module: { + CosmosAuthV1Beta1: new SDKModule(test) + }, + registry: msgTypes + } +} +export default Module; \ No newline at end of file diff --git a/ts-client/cosmos.auth.v1beta1/registry.ts b/ts-client/cosmos.auth.v1beta1/registry.ts new file mode 100755 index 000000000..26157e4ff --- /dev/null +++ b/ts-client/cosmos.auth.v1beta1/registry.ts @@ -0,0 +1,7 @@ +import { GeneratedType } from "@cosmjs/proto-signing"; + +const msgTypes: Array<[string, GeneratedType]> = [ + +]; + +export { msgTypes } \ No newline at end of file diff --git a/ts-client/cosmos.auth.v1beta1/rest.ts b/ts-client/cosmos.auth.v1beta1/rest.ts new file mode 100644 index 000000000..26f0e65f5 --- /dev/null +++ b/ts-client/cosmos.auth.v1beta1/rest.ts @@ -0,0 +1,751 @@ +/* eslint-disable */ +/* tslint:disable */ +/* + * --------------------------------------------------------------- + * ## THIS FILE WAS GENERATED VIA SWAGGER-TYPESCRIPT-API ## + * ## ## + * ## AUTHOR: acacode ## + * ## SOURCE: https://github.com/acacode/swagger-typescript-api ## + * --------------------------------------------------------------- + */ + +/** + * Params defines the parameters for the auth module. + */ +export interface Authv1Beta1Params { + /** @format uint64 */ + max_memo_characters?: string; + + /** @format uint64 */ + tx_sig_limit?: string; + + /** @format uint64 */ + tx_size_cost_per_byte?: string; + + /** @format uint64 */ + sig_verify_cost_ed25519?: string; + + /** @format uint64 */ + sig_verify_cost_secp256k1?: string; +} + +/** +* `Any` contains an arbitrary serialized protocol buffer message along with a +URL that describes the type of the serialized message. + +Protobuf library provides support to pack/unpack Any values in the form +of utility functions or additional generated methods of the Any type. 
+ +Example 1: Pack and unpack a message in C++. + + Foo foo = ...; + Any any; + any.PackFrom(foo); + ... + if (any.UnpackTo(&foo)) { + ... + } + +Example 2: Pack and unpack a message in Java. + + Foo foo = ...; + Any any = Any.pack(foo); + ... + if (any.is(Foo.class)) { + foo = any.unpack(Foo.class); + } + + Example 3: Pack and unpack a message in Python. + + foo = Foo(...) + any = Any() + any.Pack(foo) + ... + if any.Is(Foo.DESCRIPTOR): + any.Unpack(foo) + ... + + Example 4: Pack and unpack a message in Go + + foo := &pb.Foo{...} + any, err := anypb.New(foo) + if err != nil { + ... + } + ... + foo := &pb.Foo{} + if err := any.UnmarshalTo(foo); err != nil { + ... + } + +The pack methods provided by protobuf library will by default use +'type.googleapis.com/full.type.name' as the type URL and the unpack +methods only use the fully qualified type name after the last '/' +in the type URL, for example "foo.bar.com/x/y.z" will yield type +name "y.z". + + +JSON +==== +The JSON representation of an `Any` value uses the regular +representation of the deserialized, embedded message, with an +additional field `@type` which contains the type URL. Example: + + package google.profile; + message Person { + string first_name = 1; + string last_name = 2; + } + + { + "@type": "type.googleapis.com/google.profile.Person", + "firstName": , + "lastName": + } + +If the embedded message type is well-known and has a custom JSON +representation, that representation will be embedded adding a field +`value` which holds the custom JSON in addition to the `@type` +field. Example (for message [google.protobuf.Duration][]): + + { + "@type": "type.googleapis.com/google.protobuf.Duration", + "value": "1.212s" + } +*/ +export interface ProtobufAny { + /** + * A URL/resource name that uniquely identifies the type of the serialized + * protocol buffer message. This string must contain at least + * one "/" character. 
The last segment of the URL's path must represent + * the fully qualified name of the type (as in + * `path/google.protobuf.Duration`). The name should be in a canonical form + * (e.g., leading "." is not accepted). + * + * In practice, teams usually precompile into the binary all types that they + * expect it to use in the context of Any. However, for URLs which use the + * scheme `http`, `https`, or no scheme, one can optionally set up a type + * server that maps type URLs to message definitions as follows: + * * If no scheme is provided, `https` is assumed. + * * An HTTP GET on the URL must yield a [google.protobuf.Type][] + * value in binary format, or produce an error. + * * Applications are allowed to cache lookup results based on the + * URL, or have them precompiled into a binary to avoid any + * lookup. Therefore, binary compatibility needs to be preserved + * on changes to types. (Use versioned type names to manage + * breaking changes.) + * Note: this functionality is not currently available in the official + * protobuf release, and it is not used for type URLs beginning with + * type.googleapis.com. + * Schemes other than `http`, `https` (or the empty scheme) might be + * used with implementation specific semantics. + */ + "@type"?: string; +} + +export interface RpcStatus { + /** @format int32 */ + code?: number; + message?: string; + details?: ProtobufAny[]; +} + +/** +* AddressBytesToStringResponse is the response type for AddressString rpc method. + +Since: cosmos-sdk 0.46 +*/ +export interface V1Beta1AddressBytesToStringResponse { + address_string?: string; +} + +/** +* AddressStringToBytesResponse is the response type for AddressBytes rpc method. + +Since: cosmos-sdk 0.46 +*/ +export interface V1Beta1AddressStringToBytesResponse { + /** @format byte */ + address_bytes?: string; +} + +/** +* BaseAccount defines a base account type. It contains all the necessary fields +for basic account functionality. 
Any custom account type should extend this +type for additional functionality (e.g. vesting). +*/ +export interface V1Beta1BaseAccount { + address?: string; + + /** + * `Any` contains an arbitrary serialized protocol buffer message along with a + * URL that describes the type of the serialized message. + * + * Protobuf library provides support to pack/unpack Any values in the form + * of utility functions or additional generated methods of the Any type. + * Example 1: Pack and unpack a message in C++. + * Foo foo = ...; + * Any any; + * any.PackFrom(foo); + * ... + * if (any.UnpackTo(&foo)) { + * ... + * } + * Example 2: Pack and unpack a message in Java. + * Any any = Any.pack(foo); + * if (any.is(Foo.class)) { + * foo = any.unpack(Foo.class); + * Example 3: Pack and unpack a message in Python. + * foo = Foo(...) + * any = Any() + * any.Pack(foo) + * if any.Is(Foo.DESCRIPTOR): + * any.Unpack(foo) + * Example 4: Pack and unpack a message in Go + * foo := &pb.Foo{...} + * any, err := anypb.New(foo) + * if err != nil { + * ... + * } + * ... + * foo := &pb.Foo{} + * if err := any.UnmarshalTo(foo); err != nil { + * The pack methods provided by protobuf library will by default use + * 'type.googleapis.com/full.type.name' as the type URL and the unpack + * methods only use the fully qualified type name after the last '/' + * in the type URL, for example "foo.bar.com/x/y.z" will yield type + * name "y.z". + * JSON + * ==== + * The JSON representation of an `Any` value uses the regular + * representation of the deserialized, embedded message, with an + * additional field `@type` which contains the type URL. 
Example: + * package google.profile; + * message Person { + * string first_name = 1; + * string last_name = 2; + * { + * "@type": "type.googleapis.com/google.profile.Person", + * "firstName": , + * "lastName": + * If the embedded message type is well-known and has a custom JSON + * representation, that representation will be embedded adding a field + * `value` which holds the custom JSON in addition to the `@type` + * field. Example (for message [google.protobuf.Duration][]): + * "@type": "type.googleapis.com/google.protobuf.Duration", + * "value": "1.212s" + */ + pub_key?: ProtobufAny; + + /** @format uint64 */ + account_number?: string; + + /** @format uint64 */ + sequence?: string; +} + +/** +* Bech32PrefixResponse is the response type for Bech32Prefix rpc method. + +Since: cosmos-sdk 0.46 +*/ +export interface V1Beta1Bech32PrefixResponse { + bech32_prefix?: string; +} + +/** +* MsgUpdateParamsResponse defines the response structure for executing a +MsgUpdateParams message. + +Since: cosmos-sdk 0.47 +*/ +export type V1Beta1MsgUpdateParamsResponse = object; + +/** +* message SomeRequest { + Foo some_parameter = 1; + PageRequest pagination = 2; + } +*/ +export interface V1Beta1PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. + * @format byte + */ + key?: string; + + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. + * @format uint64 + */ + offset?: string; + + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. + * @format uint64 + */ + limit?: string; + + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. 
+ * count_total is only respected when offset is used. It is ignored when key + * is set. + */ + count_total?: boolean; + + /** + * reverse is set to true if results are to be returned in the descending order. + * + * Since: cosmos-sdk 0.43 + */ + reverse?: boolean; +} + +/** +* PageResponse is to be embedded in gRPC response messages where the +corresponding request message has used PageRequest. + + message SomeResponse { + repeated Bar results = 1; + PageResponse page = 2; + } +*/ +export interface V1Beta1PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. + * @format byte + */ + next_key?: string; + + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + * @format uint64 + */ + total?: string; +} + +/** + * Since: cosmos-sdk 0.46.2 + */ +export interface V1Beta1QueryAccountAddressByIDResponse { + account_address?: string; +} + +/** +* QueryAccountInfoResponse is the Query/AccountInfo response type. + +Since: cosmos-sdk 0.47 +*/ +export interface V1Beta1QueryAccountInfoResponse { + /** info is the account info which is represented by BaseAccount. */ + info?: V1Beta1BaseAccount; +} + +/** + * QueryAccountResponse is the response type for the Query/Account RPC method. + */ +export interface V1Beta1QueryAccountResponse { + /** account defines the account of the corresponding address. */ + account?: ProtobufAny; +} + +/** +* QueryAccountsResponse is the response type for the Query/Accounts RPC method. + +Since: cosmos-sdk 0.43 +*/ +export interface V1Beta1QueryAccountsResponse { + /** accounts are the existing accounts */ + accounts?: ProtobufAny[]; + + /** pagination defines the pagination in the response. */ + pagination?: V1Beta1PageResponse; +} + +/** + * QueryModuleAccountByNameResponse is the response type for the Query/ModuleAccountByName RPC method. 
+ */ +export interface V1Beta1QueryModuleAccountByNameResponse { + /** + * `Any` contains an arbitrary serialized protocol buffer message along with a + * URL that describes the type of the serialized message. + * + * Protobuf library provides support to pack/unpack Any values in the form + * of utility functions or additional generated methods of the Any type. + * Example 1: Pack and unpack a message in C++. + * Foo foo = ...; + * Any any; + * any.PackFrom(foo); + * ... + * if (any.UnpackTo(&foo)) { + * ... + * } + * Example 2: Pack and unpack a message in Java. + * Any any = Any.pack(foo); + * if (any.is(Foo.class)) { + * foo = any.unpack(Foo.class); + * Example 3: Pack and unpack a message in Python. + * foo = Foo(...) + * any = Any() + * any.Pack(foo) + * if any.Is(Foo.DESCRIPTOR): + * any.Unpack(foo) + * Example 4: Pack and unpack a message in Go + * foo := &pb.Foo{...} + * any, err := anypb.New(foo) + * if err != nil { + * ... + * } + * ... + * foo := &pb.Foo{} + * if err := any.UnmarshalTo(foo); err != nil { + * The pack methods provided by protobuf library will by default use + * 'type.googleapis.com/full.type.name' as the type URL and the unpack + * methods only use the fully qualified type name after the last '/' + * in the type URL, for example "foo.bar.com/x/y.z" will yield type + * name "y.z". + * JSON + * ==== + * The JSON representation of an `Any` value uses the regular + * representation of the deserialized, embedded message, with an + * additional field `@type` which contains the type URL. Example: + * package google.profile; + * message Person { + * string first_name = 1; + * string last_name = 2; + * { + * "@type": "type.googleapis.com/google.profile.Person", + * "firstName": , + * "lastName": + * If the embedded message type is well-known and has a custom JSON + * representation, that representation will be embedded adding a field + * `value` which holds the custom JSON in addition to the `@type` + * field. 
Example (for message [google.protobuf.Duration][]): + * "@type": "type.googleapis.com/google.protobuf.Duration", + * "value": "1.212s" + */ + account?: ProtobufAny; +} + +/** +* QueryModuleAccountsResponse is the response type for the Query/ModuleAccounts RPC method. + +Since: cosmos-sdk 0.46 +*/ +export interface V1Beta1QueryModuleAccountsResponse { + accounts?: ProtobufAny[]; +} + +/** + * QueryParamsResponse is the response type for the Query/Params RPC method. + */ +export interface V1Beta1QueryParamsResponse { + /** params defines the parameters of the module. */ + params?: Authv1Beta1Params; +} + +import axios, { AxiosInstance, AxiosRequestConfig, AxiosResponse, ResponseType } from "axios"; + +export type QueryParamsType = Record; + +export interface FullRequestParams extends Omit { + /** set parameter to `true` for call `securityWorker` for this request */ + secure?: boolean; + /** request path */ + path: string; + /** content type of request body */ + type?: ContentType; + /** query params */ + query?: QueryParamsType; + /** format of response (i.e. 
response.json() -> format: "json") */ + format?: ResponseType; + /** request body */ + body?: unknown; +} + +export type RequestParams = Omit; + +export interface ApiConfig extends Omit { + securityWorker?: ( + securityData: SecurityDataType | null, + ) => Promise | AxiosRequestConfig | void; + secure?: boolean; + format?: ResponseType; +} + +export enum ContentType { + Json = "application/json", + FormData = "multipart/form-data", + UrlEncoded = "application/x-www-form-urlencoded", +} + +export class HttpClient { + public instance: AxiosInstance; + private securityData: SecurityDataType | null = null; + private securityWorker?: ApiConfig["securityWorker"]; + private secure?: boolean; + private format?: ResponseType; + + constructor({ securityWorker, secure, format, ...axiosConfig }: ApiConfig = {}) { + this.instance = axios.create({ ...axiosConfig, baseURL: axiosConfig.baseURL || "" }); + this.secure = secure; + this.format = format; + this.securityWorker = securityWorker; + } + + public setSecurityData = (data: SecurityDataType | null) => { + this.securityData = data; + }; + + private mergeRequestParams(params1: AxiosRequestConfig, params2?: AxiosRequestConfig): AxiosRequestConfig { + return { + ...this.instance.defaults, + ...params1, + ...(params2 || {}), + headers: { + ...(this.instance.defaults.headers || {}), + ...(params1.headers || {}), + ...((params2 && params2.headers) || {}), + }, + }; + } + + private createFormData(input: Record): FormData { + return Object.keys(input || {}).reduce((formData, key) => { + const property = input[key]; + formData.append( + key, + property instanceof Blob + ? property + : typeof property === "object" && property !== null + ? JSON.stringify(property) + : `${property}`, + ); + return formData; + }, new FormData()); + } + + public request = async ({ + secure, + path, + type, + query, + format, + body, + ...params + }: FullRequestParams): Promise> => { + const secureParams = + ((typeof secure === "boolean" ? 
secure : this.secure) && + this.securityWorker && + (await this.securityWorker(this.securityData))) || + {}; + const requestParams = this.mergeRequestParams(params, secureParams); + const responseFormat = (format && this.format) || void 0; + + if (type === ContentType.FormData && body && body !== null && typeof body === "object") { + requestParams.headers.common = { Accept: "*/*" }; + requestParams.headers.post = {}; + requestParams.headers.put = {}; + + body = this.createFormData(body as Record); + } + + return this.instance.request({ + ...requestParams, + headers: { + ...(type && type !== ContentType.FormData ? { "Content-Type": type } : {}), + ...(requestParams.headers || {}), + }, + params: query, + responseType: responseFormat, + data: body, + url: path, + }); + }; +} + +/** + * @title cosmos/auth/v1beta1/auth.proto + * @version version not set + */ +export class Api extends HttpClient { + /** + * @description Since: cosmos-sdk 0.47 + * + * @tags Query + * @name QueryAccountInfo + * @summary AccountInfo queries account info which is common to all account types. + * @request GET:/cosmos/auth/v1beta1/account_info/{address} + */ + queryAccountInfo = (address: string, params: RequestParams = {}) => + this.request({ + path: `/cosmos/auth/v1beta1/account_info/${address}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * @description When called from another module, this query might consume a high amount of gas if the pagination field is incorrectly set. Since: cosmos-sdk 0.43 + * + * @tags Query + * @name QueryAccounts + * @summary Accounts returns all the existing accounts. 
+ * @request GET:/cosmos/auth/v1beta1/accounts + */ + queryAccounts = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/cosmos/auth/v1beta1/accounts`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryAccount + * @summary Account returns account details based on address. + * @request GET:/cosmos/auth/v1beta1/accounts/{address} + */ + queryAccount = (address: string, params: RequestParams = {}) => + this.request({ + path: `/cosmos/auth/v1beta1/accounts/${address}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * @description Since: cosmos-sdk 0.46.2 + * + * @tags Query + * @name QueryAccountAddressById + * @summary AccountAddressByID returns account address based on account number. + * @request GET:/cosmos/auth/v1beta1/address_by_id/{id} + */ + queryAccountAddressByID = (id: string, query?: { account_id?: string }, params: RequestParams = {}) => + this.request({ + path: `/cosmos/auth/v1beta1/address_by_id/${id}`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * @description Since: cosmos-sdk 0.46 + * + * @tags Query + * @name QueryBech32Prefix + * @summary Bech32Prefix queries bech32Prefix + * @request GET:/cosmos/auth/v1beta1/bech32 + */ + queryBech32Prefix = (params: RequestParams = {}) => + this.request({ + path: `/cosmos/auth/v1beta1/bech32`, + method: "GET", + format: "json", + ...params, + }); + + /** + * @description Since: cosmos-sdk 0.46 + * + * @tags Query + * @name QueryAddressBytesToString + * @summary AddressBytesToString converts Account Address bytes to string + * @request GET:/cosmos/auth/v1beta1/bech32/{address_bytes} + */ + queryAddressBytesToString = (addressBytes: string, params: RequestParams = {}) => + this.request({ + 
path: `/cosmos/auth/v1beta1/bech32/${addressBytes}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * @description Since: cosmos-sdk 0.46 + * + * @tags Query + * @name QueryAddressStringToBytes + * @summary AddressStringToBytes converts Address string to bytes + * @request GET:/cosmos/auth/v1beta1/bech32/{address_string} + */ + queryAddressStringToBytes = (addressString: string, params: RequestParams = {}) => + this.request({ + path: `/cosmos/auth/v1beta1/bech32/${addressString}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * @description Since: cosmos-sdk 0.46 + * + * @tags Query + * @name QueryModuleAccounts + * @summary ModuleAccounts returns all the existing module accounts. + * @request GET:/cosmos/auth/v1beta1/module_accounts + */ + queryModuleAccounts = (params: RequestParams = {}) => + this.request({ + path: `/cosmos/auth/v1beta1/module_accounts`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryModuleAccountByName + * @summary ModuleAccountByName returns the module account info by module name + * @request GET:/cosmos/auth/v1beta1/module_accounts/{name} + */ + queryModuleAccountByName = (name: string, params: RequestParams = {}) => + this.request({ + path: `/cosmos/auth/v1beta1/module_accounts/${name}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryParams + * @summary Params queries all parameters. 
+ * @request GET:/cosmos/auth/v1beta1/params + */ + queryParams = (params: RequestParams = {}) => + this.request({ + path: `/cosmos/auth/v1beta1/params`, + method: "GET", + format: "json", + ...params, + }); +} diff --git a/ts-client/cosmos.auth.v1beta1/types.ts b/ts-client/cosmos.auth.v1beta1/types.ts new file mode 100755 index 000000000..845749128 --- /dev/null +++ b/ts-client/cosmos.auth.v1beta1/types.ts @@ -0,0 +1,13 @@ +import { BaseAccount } from "./types/cosmos/auth/v1beta1/auth" +import { ModuleAccount } from "./types/cosmos/auth/v1beta1/auth" +import { ModuleCredential } from "./types/cosmos/auth/v1beta1/auth" +import { Params } from "./types/cosmos/auth/v1beta1/auth" + + +export { + BaseAccount, + ModuleAccount, + ModuleCredential, + Params, + + } \ No newline at end of file diff --git a/ts-client/cosmos.auth.v1beta1/types/amino/amino.ts b/ts-client/cosmos.auth.v1beta1/types/amino/amino.ts new file mode 100644 index 000000000..4b67dcfe4 --- /dev/null +++ b/ts-client/cosmos.auth.v1beta1/types/amino/amino.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "amino"; diff --git a/ts-client/cosmos.auth.v1beta1/types/cosmos/auth/v1beta1/auth.ts b/ts-client/cosmos.auth.v1beta1/types/cosmos/auth/v1beta1/auth.ts new file mode 100644 index 000000000..a37ece488 --- /dev/null +++ b/ts-client/cosmos.auth.v1beta1/types/cosmos/auth/v1beta1/auth.ts @@ -0,0 +1,428 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { Any } from "../../../google/protobuf/any"; + +export const protobufPackage = "cosmos.auth.v1beta1"; + +/** + * BaseAccount defines a base account type. It contains all the necessary fields + * for basic account functionality. Any custom account type should extend this + * type for additional functionality (e.g. vesting). 
+ */ +export interface BaseAccount { + address: string; + pubKey: Any | undefined; + accountNumber: number; + sequence: number; +} + +/** ModuleAccount defines an account for modules that holds coins on a pool. */ +export interface ModuleAccount { + baseAccount: BaseAccount | undefined; + name: string; + permissions: string[]; +} + +/** + * ModuleCredential represents a unclaimable pubkey for base accounts controlled by modules. + * + * Since: cosmos-sdk 0.47 + */ +export interface ModuleCredential { + /** module_name is the name of the module used for address derivation (passed into address.Module). */ + moduleName: string; + /** + * derivation_keys is for deriving a module account address (passed into address.Module) + * adding more keys creates sub-account addresses (passed into address.Derive) + */ + derivationKeys: Uint8Array[]; +} + +/** Params defines the parameters for the auth module. */ +export interface Params { + maxMemoCharacters: number; + txSigLimit: number; + txSizeCostPerByte: number; + sigVerifyCostEd25519: number; + sigVerifyCostSecp256k1: number; +} + +function createBaseBaseAccount(): BaseAccount { + return { address: "", pubKey: undefined, accountNumber: 0, sequence: 0 }; +} + +export const BaseAccount = { + encode(message: BaseAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + if (message.pubKey !== undefined) { + Any.encode(message.pubKey, writer.uint32(18).fork()).ldelim(); + } + if (message.accountNumber !== 0) { + writer.uint32(24).uint64(message.accountNumber); + } + if (message.sequence !== 0) { + writer.uint32(32).uint64(message.sequence); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BaseAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseBaseAccount(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + case 2: + message.pubKey = Any.decode(reader, reader.uint32()); + break; + case 3: + message.accountNumber = longToNumber(reader.uint64() as Long); + break; + case 4: + message.sequence = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): BaseAccount { + return { + address: isSet(object.address) ? String(object.address) : "", + pubKey: isSet(object.pubKey) ? Any.fromJSON(object.pubKey) : undefined, + accountNumber: isSet(object.accountNumber) ? Number(object.accountNumber) : 0, + sequence: isSet(object.sequence) ? Number(object.sequence) : 0, + }; + }, + + toJSON(message: BaseAccount): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + message.pubKey !== undefined && (obj.pubKey = message.pubKey ? Any.toJSON(message.pubKey) : undefined); + message.accountNumber !== undefined && (obj.accountNumber = Math.round(message.accountNumber)); + message.sequence !== undefined && (obj.sequence = Math.round(message.sequence)); + return obj; + }, + + fromPartial, I>>(object: I): BaseAccount { + const message = createBaseBaseAccount(); + message.address = object.address ?? ""; + message.pubKey = (object.pubKey !== undefined && object.pubKey !== null) + ? Any.fromPartial(object.pubKey) + : undefined; + message.accountNumber = object.accountNumber ?? 0; + message.sequence = object.sequence ?? 
0; + return message; + }, +}; + +function createBaseModuleAccount(): ModuleAccount { + return { baseAccount: undefined, name: "", permissions: [] }; +} + +export const ModuleAccount = { + encode(message: ModuleAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.baseAccount !== undefined) { + BaseAccount.encode(message.baseAccount, writer.uint32(10).fork()).ldelim(); + } + if (message.name !== "") { + writer.uint32(18).string(message.name); + } + for (const v of message.permissions) { + writer.uint32(26).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ModuleAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseModuleAccount(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.baseAccount = BaseAccount.decode(reader, reader.uint32()); + break; + case 2: + message.name = reader.string(); + break; + case 3: + message.permissions.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ModuleAccount { + return { + baseAccount: isSet(object.baseAccount) ? BaseAccount.fromJSON(object.baseAccount) : undefined, + name: isSet(object.name) ? String(object.name) : "", + permissions: Array.isArray(object?.permissions) ? object.permissions.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: ModuleAccount): unknown { + const obj: any = {}; + message.baseAccount !== undefined + && (obj.baseAccount = message.baseAccount ? 
BaseAccount.toJSON(message.baseAccount) : undefined); + message.name !== undefined && (obj.name = message.name); + if (message.permissions) { + obj.permissions = message.permissions.map((e) => e); + } else { + obj.permissions = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ModuleAccount { + const message = createBaseModuleAccount(); + message.baseAccount = (object.baseAccount !== undefined && object.baseAccount !== null) + ? BaseAccount.fromPartial(object.baseAccount) + : undefined; + message.name = object.name ?? ""; + message.permissions = object.permissions?.map((e) => e) || []; + return message; + }, +}; + +function createBaseModuleCredential(): ModuleCredential { + return { moduleName: "", derivationKeys: [] }; +} + +export const ModuleCredential = { + encode(message: ModuleCredential, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.moduleName !== "") { + writer.uint32(10).string(message.moduleName); + } + for (const v of message.derivationKeys) { + writer.uint32(18).bytes(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ModuleCredential { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseModuleCredential(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.moduleName = reader.string(); + break; + case 2: + message.derivationKeys.push(reader.bytes()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ModuleCredential { + return { + moduleName: isSet(object.moduleName) ? String(object.moduleName) : "", + derivationKeys: Array.isArray(object?.derivationKeys) + ? 
object.derivationKeys.map((e: any) => bytesFromBase64(e)) + : [], + }; + }, + + toJSON(message: ModuleCredential): unknown { + const obj: any = {}; + message.moduleName !== undefined && (obj.moduleName = message.moduleName); + if (message.derivationKeys) { + obj.derivationKeys = message.derivationKeys.map((e) => base64FromBytes(e !== undefined ? e : new Uint8Array())); + } else { + obj.derivationKeys = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ModuleCredential { + const message = createBaseModuleCredential(); + message.moduleName = object.moduleName ?? ""; + message.derivationKeys = object.derivationKeys?.map((e) => e) || []; + return message; + }, +}; + +function createBaseParams(): Params { + return { + maxMemoCharacters: 0, + txSigLimit: 0, + txSizeCostPerByte: 0, + sigVerifyCostEd25519: 0, + sigVerifyCostSecp256k1: 0, + }; +} + +export const Params = { + encode(message: Params, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.maxMemoCharacters !== 0) { + writer.uint32(8).uint64(message.maxMemoCharacters); + } + if (message.txSigLimit !== 0) { + writer.uint32(16).uint64(message.txSigLimit); + } + if (message.txSizeCostPerByte !== 0) { + writer.uint32(24).uint64(message.txSizeCostPerByte); + } + if (message.sigVerifyCostEd25519 !== 0) { + writer.uint32(32).uint64(message.sigVerifyCostEd25519); + } + if (message.sigVerifyCostSecp256k1 !== 0) { + writer.uint32(40).uint64(message.sigVerifyCostSecp256k1); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Params { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseParams(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.maxMemoCharacters = longToNumber(reader.uint64() as Long); + break; + case 2: + message.txSigLimit = longToNumber(reader.uint64() as Long); + break; + case 3: + message.txSizeCostPerByte = longToNumber(reader.uint64() as Long); + break; + case 4: + message.sigVerifyCostEd25519 = longToNumber(reader.uint64() as Long); + break; + case 5: + message.sigVerifyCostSecp256k1 = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Params { + return { + maxMemoCharacters: isSet(object.maxMemoCharacters) ? Number(object.maxMemoCharacters) : 0, + txSigLimit: isSet(object.txSigLimit) ? Number(object.txSigLimit) : 0, + txSizeCostPerByte: isSet(object.txSizeCostPerByte) ? Number(object.txSizeCostPerByte) : 0, + sigVerifyCostEd25519: isSet(object.sigVerifyCostEd25519) ? Number(object.sigVerifyCostEd25519) : 0, + sigVerifyCostSecp256k1: isSet(object.sigVerifyCostSecp256k1) ? Number(object.sigVerifyCostSecp256k1) : 0, + }; + }, + + toJSON(message: Params): unknown { + const obj: any = {}; + message.maxMemoCharacters !== undefined && (obj.maxMemoCharacters = Math.round(message.maxMemoCharacters)); + message.txSigLimit !== undefined && (obj.txSigLimit = Math.round(message.txSigLimit)); + message.txSizeCostPerByte !== undefined && (obj.txSizeCostPerByte = Math.round(message.txSizeCostPerByte)); + message.sigVerifyCostEd25519 !== undefined && (obj.sigVerifyCostEd25519 = Math.round(message.sigVerifyCostEd25519)); + message.sigVerifyCostSecp256k1 !== undefined + && (obj.sigVerifyCostSecp256k1 = Math.round(message.sigVerifyCostSecp256k1)); + return obj; + }, + + fromPartial, I>>(object: I): Params { + const message = createBaseParams(); + message.maxMemoCharacters = object.maxMemoCharacters ?? 
0; + message.txSigLimit = object.txSigLimit ?? 0; + message.txSizeCostPerByte = object.txSizeCostPerByte ?? 0; + message.sigVerifyCostEd25519 = object.sigVerifyCostEd25519 ?? 0; + message.sigVerifyCostSecp256k1 = object.sigVerifyCostSecp256k1 ?? 0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.auth.v1beta1/types/cosmos/auth/v1beta1/genesis.ts b/ts-client/cosmos.auth.v1beta1/types/cosmos/auth/v1beta1/genesis.ts new file mode 100644 index 000000000..73c25b97b --- /dev/null +++ b/ts-client/cosmos.auth.v1beta1/types/cosmos/auth/v1beta1/genesis.ts @@ -0,0 +1,95 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Any } from "../../../google/protobuf/any"; +import { Params } from "./auth"; + +export const protobufPackage = "cosmos.auth.v1beta1"; + +/** GenesisState defines the auth module's genesis state. */ +export interface GenesisState { + /** params defines all the parameters of the module. */ + params: + | Params + | undefined; + /** accounts are the accounts present at genesis. */ + accounts: Any[]; +} + +function createBaseGenesisState(): GenesisState { + return { params: undefined, accounts: [] }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + for (const v of message.accounts) { + Any.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGenesisState(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.params = Params.decode(reader, reader.uint32()); + break; + case 2: + message.accounts.push(Any.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GenesisState { + return { + params: isSet(object.params) ? Params.fromJSON(object.params) : undefined, + accounts: Array.isArray(object?.accounts) ? object.accounts.map((e: any) => Any.fromJSON(e)) : [], + }; + }, + + toJSON(message: GenesisState): unknown { + const obj: any = {}; + message.params !== undefined && (obj.params = message.params ? Params.toJSON(message.params) : undefined); + if (message.accounts) { + obj.accounts = message.accounts.map((e) => e ? Any.toJSON(e) : undefined); + } else { + obj.accounts = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GenesisState { + const message = createBaseGenesisState(); + message.params = (object.params !== undefined && object.params !== null) + ? Params.fromPartial(object.params) + : undefined; + message.accounts = object.accounts?.map((e) => Any.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.auth.v1beta1/types/cosmos/auth/v1beta1/query.ts b/ts-client/cosmos.auth.v1beta1/types/cosmos/auth/v1beta1/query.ts new file mode 100644 index 000000000..44b6ae9fb --- /dev/null +++ b/ts-client/cosmos.auth.v1beta1/types/cosmos/auth/v1beta1/query.ts @@ -0,0 +1,1365 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { Any } from "../../../google/protobuf/any"; +import { PageRequest, PageResponse } from "../../base/query/v1beta1/pagination"; +import { BaseAccount, Params } from "./auth"; + +export const protobufPackage = "cosmos.auth.v1beta1"; + +/** + * QueryAccountsRequest is the request type for the Query/Accounts RPC method. + * + * Since: cosmos-sdk 0.43 + */ +export interface QueryAccountsRequest { + /** pagination defines an optional pagination for the request. */ + pagination: PageRequest | undefined; +} + +/** + * QueryAccountsResponse is the response type for the Query/Accounts RPC method. + * + * Since: cosmos-sdk 0.43 + */ +export interface QueryAccountsResponse { + /** accounts are the existing accounts */ + accounts: Any[]; + /** pagination defines the pagination in the response. */ + pagination: PageResponse | undefined; +} + +/** QueryAccountRequest is the request type for the Query/Account RPC method. */ +export interface QueryAccountRequest { + /** address defines the address to query for. */ + address: string; +} + +/** QueryAccountResponse is the response type for the Query/Account RPC method. */ +export interface QueryAccountResponse { + /** account defines the account of the corresponding address. */ + account: Any | undefined; +} + +/** QueryParamsRequest is the request type for the Query/Params RPC method. 
*/ +export interface QueryParamsRequest { +} + +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ +export interface QueryParamsResponse { + /** params defines the parameters of the module. */ + params: Params | undefined; +} + +/** + * QueryModuleAccountsRequest is the request type for the Query/ModuleAccounts RPC method. + * + * Since: cosmos-sdk 0.46 + */ +export interface QueryModuleAccountsRequest { +} + +/** + * QueryModuleAccountsResponse is the response type for the Query/ModuleAccounts RPC method. + * + * Since: cosmos-sdk 0.46 + */ +export interface QueryModuleAccountsResponse { + accounts: Any[]; +} + +/** QueryModuleAccountByNameRequest is the request type for the Query/ModuleAccountByName RPC method. */ +export interface QueryModuleAccountByNameRequest { + name: string; +} + +/** QueryModuleAccountByNameResponse is the response type for the Query/ModuleAccountByName RPC method. */ +export interface QueryModuleAccountByNameResponse { + account: Any | undefined; +} + +/** + * Bech32PrefixRequest is the request type for Bech32Prefix rpc method. + * + * Since: cosmos-sdk 0.46 + */ +export interface Bech32PrefixRequest { +} + +/** + * Bech32PrefixResponse is the response type for Bech32Prefix rpc method. + * + * Since: cosmos-sdk 0.46 + */ +export interface Bech32PrefixResponse { + bech32Prefix: string; +} + +/** + * AddressBytesToStringRequest is the request type for AddressString rpc method. + * + * Since: cosmos-sdk 0.46 + */ +export interface AddressBytesToStringRequest { + addressBytes: Uint8Array; +} + +/** + * AddressBytesToStringResponse is the response type for AddressString rpc method. + * + * Since: cosmos-sdk 0.46 + */ +export interface AddressBytesToStringResponse { + addressString: string; +} + +/** + * AddressStringToBytesRequest is the request type for AccountBytes rpc method. 
+ * + * Since: cosmos-sdk 0.46 + */ +export interface AddressStringToBytesRequest { + addressString: string; +} + +/** + * AddressStringToBytesResponse is the response type for AddressBytes rpc method. + * + * Since: cosmos-sdk 0.46 + */ +export interface AddressStringToBytesResponse { + addressBytes: Uint8Array; +} + +/** + * QueryAccountAddressByIDRequest is the request type for AccountAddressByID rpc method + * + * Since: cosmos-sdk 0.46.2 + */ +export interface QueryAccountAddressByIDRequest { + /** + * Deprecated, use account_id instead + * + * id is the account number of the address to be queried. This field + * should have been an uint64 (like all account numbers), and will be + * updated to uint64 in a future version of the auth query. + * + * @deprecated + */ + id: number; + /** + * account_id is the account number of the address to be queried. + * + * Since: cosmos-sdk 0.47 + */ + accountId: number; +} + +/** + * QueryAccountAddressByIDResponse is the response type for AccountAddressByID rpc method + * + * Since: cosmos-sdk 0.46.2 + */ +export interface QueryAccountAddressByIDResponse { + accountAddress: string; +} + +/** + * QueryAccountInfoRequest is the Query/AccountInfo request type. + * + * Since: cosmos-sdk 0.47 + */ +export interface QueryAccountInfoRequest { + /** address is the account address string. */ + address: string; +} + +/** + * QueryAccountInfoResponse is the Query/AccountInfo response type. + * + * Since: cosmos-sdk 0.47 + */ +export interface QueryAccountInfoResponse { + /** info is the account info which is represented by BaseAccount. 
*/ + info: BaseAccount | undefined; +} + +function createBaseQueryAccountsRequest(): QueryAccountsRequest { + return { pagination: undefined }; +} + +export const QueryAccountsRequest = { + encode(message: QueryAccountsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAccountsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAccountsRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAccountsRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAccountsRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryAccountsRequest { + const message = createBaseQueryAccountsRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? 
PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAccountsResponse(): QueryAccountsResponse { + return { accounts: [], pagination: undefined }; +} + +export const QueryAccountsResponse = { + encode(message: QueryAccountsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.accounts) { + Any.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAccountsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAccountsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.accounts.push(Any.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAccountsResponse { + return { + accounts: Array.isArray(object?.accounts) ? object.accounts.map((e: any) => Any.fromJSON(e)) : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAccountsResponse): unknown { + const obj: any = {}; + if (message.accounts) { + obj.accounts = message.accounts.map((e) => e ? Any.toJSON(e) : undefined); + } else { + obj.accounts = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? 
PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryAccountsResponse { + const message = createBaseQueryAccountsResponse(); + message.accounts = object.accounts?.map((e) => Any.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAccountRequest(): QueryAccountRequest { + return { address: "" }; +} + +export const QueryAccountRequest = { + encode(message: QueryAccountRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAccountRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAccountRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAccountRequest { + return { address: isSet(object.address) ? String(object.address) : "" }; + }, + + toJSON(message: QueryAccountRequest): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + return obj; + }, + + fromPartial, I>>(object: I): QueryAccountRequest { + const message = createBaseQueryAccountRequest(); + message.address = object.address ?? 
""; + return message; + }, +}; + +function createBaseQueryAccountResponse(): QueryAccountResponse { + return { account: undefined }; +} + +export const QueryAccountResponse = { + encode(message: QueryAccountResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.account !== undefined) { + Any.encode(message.account, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAccountResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAccountResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.account = Any.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAccountResponse { + return { account: isSet(object.account) ? Any.fromJSON(object.account) : undefined }; + }, + + toJSON(message: QueryAccountResponse): unknown { + const obj: any = {}; + message.account !== undefined && (obj.account = message.account ? Any.toJSON(message.account) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryAccountResponse { + const message = createBaseQueryAccountResponse(); + message.account = (object.account !== undefined && object.account !== null) + ? Any.fromPartial(object.account) + : undefined; + return message; + }, +}; + +function createBaseQueryParamsRequest(): QueryParamsRequest { + return {}; +} + +export const QueryParamsRequest = { + encode(_: QueryParamsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryParamsRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): QueryParamsRequest { + return {}; + }, + + toJSON(_: QueryParamsRequest): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): QueryParamsRequest { + const message = createBaseQueryParamsRequest(); + return message; + }, +}; + +function createBaseQueryParamsResponse(): QueryParamsResponse { + return { params: undefined }; +} + +export const QueryParamsResponse = { + encode(message: QueryParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.params = Params.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryParamsResponse { + return { params: isSet(object.params) ? Params.fromJSON(object.params) : undefined }; + }, + + toJSON(message: QueryParamsResponse): unknown { + const obj: any = {}; + message.params !== undefined && (obj.params = message.params ? Params.toJSON(message.params) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryParamsResponse { + const message = createBaseQueryParamsResponse(); + message.params = (object.params !== undefined && object.params !== null) + ? 
Params.fromPartial(object.params) + : undefined; + return message; + }, +}; + +function createBaseQueryModuleAccountsRequest(): QueryModuleAccountsRequest { + return {}; +} + +export const QueryModuleAccountsRequest = { + encode(_: QueryModuleAccountsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryModuleAccountsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryModuleAccountsRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): QueryModuleAccountsRequest { + return {}; + }, + + toJSON(_: QueryModuleAccountsRequest): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): QueryModuleAccountsRequest { + const message = createBaseQueryModuleAccountsRequest(); + return message; + }, +}; + +function createBaseQueryModuleAccountsResponse(): QueryModuleAccountsResponse { + return { accounts: [] }; +} + +export const QueryModuleAccountsResponse = { + encode(message: QueryModuleAccountsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.accounts) { + Any.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryModuleAccountsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryModuleAccountsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.accounts.push(Any.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryModuleAccountsResponse { + return { accounts: Array.isArray(object?.accounts) ? object.accounts.map((e: any) => Any.fromJSON(e)) : [] }; + }, + + toJSON(message: QueryModuleAccountsResponse): unknown { + const obj: any = {}; + if (message.accounts) { + obj.accounts = message.accounts.map((e) => e ? Any.toJSON(e) : undefined); + } else { + obj.accounts = []; + } + return obj; + }, + + fromPartial, I>>(object: I): QueryModuleAccountsResponse { + const message = createBaseQueryModuleAccountsResponse(); + message.accounts = object.accounts?.map((e) => Any.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseQueryModuleAccountByNameRequest(): QueryModuleAccountByNameRequest { + return { name: "" }; +} + +export const QueryModuleAccountByNameRequest = { + encode(message: QueryModuleAccountByNameRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryModuleAccountByNameRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryModuleAccountByNameRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryModuleAccountByNameRequest { + return { name: isSet(object.name) ? 
String(object.name) : "" }; + }, + + toJSON(message: QueryModuleAccountByNameRequest): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryModuleAccountByNameRequest { + const message = createBaseQueryModuleAccountByNameRequest(); + message.name = object.name ?? ""; + return message; + }, +}; + +function createBaseQueryModuleAccountByNameResponse(): QueryModuleAccountByNameResponse { + return { account: undefined }; +} + +export const QueryModuleAccountByNameResponse = { + encode(message: QueryModuleAccountByNameResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.account !== undefined) { + Any.encode(message.account, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryModuleAccountByNameResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryModuleAccountByNameResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.account = Any.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryModuleAccountByNameResponse { + return { account: isSet(object.account) ? Any.fromJSON(object.account) : undefined }; + }, + + toJSON(message: QueryModuleAccountByNameResponse): unknown { + const obj: any = {}; + message.account !== undefined && (obj.account = message.account ? Any.toJSON(message.account) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryModuleAccountByNameResponse { + const message = createBaseQueryModuleAccountByNameResponse(); + message.account = (object.account !== undefined && object.account !== null) + ? 
Any.fromPartial(object.account) + : undefined; + return message; + }, +}; + +function createBaseBech32PrefixRequest(): Bech32PrefixRequest { + return {}; +} + +export const Bech32PrefixRequest = { + encode(_: Bech32PrefixRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Bech32PrefixRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBech32PrefixRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): Bech32PrefixRequest { + return {}; + }, + + toJSON(_: Bech32PrefixRequest): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): Bech32PrefixRequest { + const message = createBaseBech32PrefixRequest(); + return message; + }, +}; + +function createBaseBech32PrefixResponse(): Bech32PrefixResponse { + return { bech32Prefix: "" }; +} + +export const Bech32PrefixResponse = { + encode(message: Bech32PrefixResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.bech32Prefix !== "") { + writer.uint32(10).string(message.bech32Prefix); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Bech32PrefixResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBech32PrefixResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.bech32Prefix = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Bech32PrefixResponse { + return { bech32Prefix: isSet(object.bech32Prefix) ? 
String(object.bech32Prefix) : "" }; + }, + + toJSON(message: Bech32PrefixResponse): unknown { + const obj: any = {}; + message.bech32Prefix !== undefined && (obj.bech32Prefix = message.bech32Prefix); + return obj; + }, + + fromPartial, I>>(object: I): Bech32PrefixResponse { + const message = createBaseBech32PrefixResponse(); + message.bech32Prefix = object.bech32Prefix ?? ""; + return message; + }, +}; + +function createBaseAddressBytesToStringRequest(): AddressBytesToStringRequest { + return { addressBytes: new Uint8Array() }; +} + +export const AddressBytesToStringRequest = { + encode(message: AddressBytesToStringRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.addressBytes.length !== 0) { + writer.uint32(10).bytes(message.addressBytes); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AddressBytesToStringRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAddressBytesToStringRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.addressBytes = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): AddressBytesToStringRequest { + return { addressBytes: isSet(object.addressBytes) ? bytesFromBase64(object.addressBytes) : new Uint8Array() }; + }, + + toJSON(message: AddressBytesToStringRequest): unknown { + const obj: any = {}; + message.addressBytes !== undefined + && (obj.addressBytes = base64FromBytes( + message.addressBytes !== undefined ? message.addressBytes : new Uint8Array(), + )); + return obj; + }, + + fromPartial, I>>(object: I): AddressBytesToStringRequest { + const message = createBaseAddressBytesToStringRequest(); + message.addressBytes = object.addressBytes ?? 
new Uint8Array(); + return message; + }, +}; + +function createBaseAddressBytesToStringResponse(): AddressBytesToStringResponse { + return { addressString: "" }; +} + +export const AddressBytesToStringResponse = { + encode(message: AddressBytesToStringResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.addressString !== "") { + writer.uint32(10).string(message.addressString); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AddressBytesToStringResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAddressBytesToStringResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.addressString = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): AddressBytesToStringResponse { + return { addressString: isSet(object.addressString) ? String(object.addressString) : "" }; + }, + + toJSON(message: AddressBytesToStringResponse): unknown { + const obj: any = {}; + message.addressString !== undefined && (obj.addressString = message.addressString); + return obj; + }, + + fromPartial, I>>(object: I): AddressBytesToStringResponse { + const message = createBaseAddressBytesToStringResponse(); + message.addressString = object.addressString ?? 
""; + return message; + }, +}; + +function createBaseAddressStringToBytesRequest(): AddressStringToBytesRequest { + return { addressString: "" }; +} + +export const AddressStringToBytesRequest = { + encode(message: AddressStringToBytesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.addressString !== "") { + writer.uint32(10).string(message.addressString); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AddressStringToBytesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAddressStringToBytesRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.addressString = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): AddressStringToBytesRequest { + return { addressString: isSet(object.addressString) ? String(object.addressString) : "" }; + }, + + toJSON(message: AddressStringToBytesRequest): unknown { + const obj: any = {}; + message.addressString !== undefined && (obj.addressString = message.addressString); + return obj; + }, + + fromPartial, I>>(object: I): AddressStringToBytesRequest { + const message = createBaseAddressStringToBytesRequest(); + message.addressString = object.addressString ?? 
""; + return message; + }, +}; + +function createBaseAddressStringToBytesResponse(): AddressStringToBytesResponse { + return { addressBytes: new Uint8Array() }; +} + +export const AddressStringToBytesResponse = { + encode(message: AddressStringToBytesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.addressBytes.length !== 0) { + writer.uint32(10).bytes(message.addressBytes); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AddressStringToBytesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAddressStringToBytesResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.addressBytes = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): AddressStringToBytesResponse { + return { addressBytes: isSet(object.addressBytes) ? bytesFromBase64(object.addressBytes) : new Uint8Array() }; + }, + + toJSON(message: AddressStringToBytesResponse): unknown { + const obj: any = {}; + message.addressBytes !== undefined + && (obj.addressBytes = base64FromBytes( + message.addressBytes !== undefined ? message.addressBytes : new Uint8Array(), + )); + return obj; + }, + + fromPartial, I>>(object: I): AddressStringToBytesResponse { + const message = createBaseAddressStringToBytesResponse(); + message.addressBytes = object.addressBytes ?? 
new Uint8Array(); + return message; + }, +}; + +function createBaseQueryAccountAddressByIDRequest(): QueryAccountAddressByIDRequest { + return { id: 0, accountId: 0 }; +} + +export const QueryAccountAddressByIDRequest = { + encode(message: QueryAccountAddressByIDRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.id !== 0) { + writer.uint32(8).int64(message.id); + } + if (message.accountId !== 0) { + writer.uint32(16).uint64(message.accountId); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAccountAddressByIDRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAccountAddressByIDRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.id = longToNumber(reader.int64() as Long); + break; + case 2: + message.accountId = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAccountAddressByIDRequest { + return { + id: isSet(object.id) ? Number(object.id) : 0, + accountId: isSet(object.accountId) ? Number(object.accountId) : 0, + }; + }, + + toJSON(message: QueryAccountAddressByIDRequest): unknown { + const obj: any = {}; + message.id !== undefined && (obj.id = Math.round(message.id)); + message.accountId !== undefined && (obj.accountId = Math.round(message.accountId)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAccountAddressByIDRequest { + const message = createBaseQueryAccountAddressByIDRequest(); + message.id = object.id ?? 0; + message.accountId = object.accountId ?? 
0; + return message; + }, +}; + +function createBaseQueryAccountAddressByIDResponse(): QueryAccountAddressByIDResponse { + return { accountAddress: "" }; +} + +export const QueryAccountAddressByIDResponse = { + encode(message: QueryAccountAddressByIDResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.accountAddress !== "") { + writer.uint32(10).string(message.accountAddress); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAccountAddressByIDResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAccountAddressByIDResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.accountAddress = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAccountAddressByIDResponse { + return { accountAddress: isSet(object.accountAddress) ? String(object.accountAddress) : "" }; + }, + + toJSON(message: QueryAccountAddressByIDResponse): unknown { + const obj: any = {}; + message.accountAddress !== undefined && (obj.accountAddress = message.accountAddress); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAccountAddressByIDResponse { + const message = createBaseQueryAccountAddressByIDResponse(); + message.accountAddress = object.accountAddress ?? 
""; + return message; + }, +}; + +function createBaseQueryAccountInfoRequest(): QueryAccountInfoRequest { + return { address: "" }; +} + +export const QueryAccountInfoRequest = { + encode(message: QueryAccountInfoRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAccountInfoRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAccountInfoRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAccountInfoRequest { + return { address: isSet(object.address) ? String(object.address) : "" }; + }, + + toJSON(message: QueryAccountInfoRequest): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + return obj; + }, + + fromPartial, I>>(object: I): QueryAccountInfoRequest { + const message = createBaseQueryAccountInfoRequest(); + message.address = object.address ?? ""; + return message; + }, +}; + +function createBaseQueryAccountInfoResponse(): QueryAccountInfoResponse { + return { info: undefined }; +} + +export const QueryAccountInfoResponse = { + encode(message: QueryAccountInfoResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.info !== undefined) { + BaseAccount.encode(message.info, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAccountInfoResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAccountInfoResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.info = BaseAccount.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAccountInfoResponse { + return { info: isSet(object.info) ? BaseAccount.fromJSON(object.info) : undefined }; + }, + + toJSON(message: QueryAccountInfoResponse): unknown { + const obj: any = {}; + message.info !== undefined && (obj.info = message.info ? BaseAccount.toJSON(message.info) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryAccountInfoResponse { + const message = createBaseQueryAccountInfoResponse(); + message.info = (object.info !== undefined && object.info !== null) + ? BaseAccount.fromPartial(object.info) + : undefined; + return message; + }, +}; + +/** Query defines the gRPC querier service. */ +export interface Query { + /** + * Accounts returns all the existing accounts. + * + * When called from another module, this query might consume a high amount of + * gas if the pagination field is incorrectly set. + * + * Since: cosmos-sdk 0.43 + */ + Accounts(request: QueryAccountsRequest): Promise; + /** Account returns account details based on address. */ + Account(request: QueryAccountRequest): Promise; + /** + * AccountAddressByID returns account address based on account number. + * + * Since: cosmos-sdk 0.46.2 + */ + AccountAddressByID(request: QueryAccountAddressByIDRequest): Promise; + /** Params queries all parameters. */ + Params(request: QueryParamsRequest): Promise; + /** + * ModuleAccounts returns all the existing module accounts. 
+ * + * Since: cosmos-sdk 0.46 + */ + ModuleAccounts(request: QueryModuleAccountsRequest): Promise; + /** ModuleAccountByName returns the module account info by module name */ + ModuleAccountByName(request: QueryModuleAccountByNameRequest): Promise; + /** + * Bech32Prefix queries bech32Prefix + * + * Since: cosmos-sdk 0.46 + */ + Bech32Prefix(request: Bech32PrefixRequest): Promise; + /** + * AddressBytesToString converts Account Address bytes to string + * + * Since: cosmos-sdk 0.46 + */ + AddressBytesToString(request: AddressBytesToStringRequest): Promise; + /** + * AddressStringToBytes converts Address string to bytes + * + * Since: cosmos-sdk 0.46 + */ + AddressStringToBytes(request: AddressStringToBytesRequest): Promise; + /** + * AccountInfo queries account info which is common to all account types. + * + * Since: cosmos-sdk 0.47 + */ + AccountInfo(request: QueryAccountInfoRequest): Promise; +} + +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.Accounts = this.Accounts.bind(this); + this.Account = this.Account.bind(this); + this.AccountAddressByID = this.AccountAddressByID.bind(this); + this.Params = this.Params.bind(this); + this.ModuleAccounts = this.ModuleAccounts.bind(this); + this.ModuleAccountByName = this.ModuleAccountByName.bind(this); + this.Bech32Prefix = this.Bech32Prefix.bind(this); + this.AddressBytesToString = this.AddressBytesToString.bind(this); + this.AddressStringToBytes = this.AddressStringToBytes.bind(this); + this.AccountInfo = this.AccountInfo.bind(this); + } + Accounts(request: QueryAccountsRequest): Promise { + const data = QueryAccountsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.auth.v1beta1.Query", "Accounts", data); + return promise.then((data) => QueryAccountsResponse.decode(new _m0.Reader(data))); + } + + Account(request: QueryAccountRequest): Promise { + const data = QueryAccountRequest.encode(request).finish(); + const 
promise = this.rpc.request("cosmos.auth.v1beta1.Query", "Account", data); + return promise.then((data) => QueryAccountResponse.decode(new _m0.Reader(data))); + } + + AccountAddressByID(request: QueryAccountAddressByIDRequest): Promise { + const data = QueryAccountAddressByIDRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.auth.v1beta1.Query", "AccountAddressByID", data); + return promise.then((data) => QueryAccountAddressByIDResponse.decode(new _m0.Reader(data))); + } + + Params(request: QueryParamsRequest): Promise { + const data = QueryParamsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.auth.v1beta1.Query", "Params", data); + return promise.then((data) => QueryParamsResponse.decode(new _m0.Reader(data))); + } + + ModuleAccounts(request: QueryModuleAccountsRequest): Promise { + const data = QueryModuleAccountsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.auth.v1beta1.Query", "ModuleAccounts", data); + return promise.then((data) => QueryModuleAccountsResponse.decode(new _m0.Reader(data))); + } + + ModuleAccountByName(request: QueryModuleAccountByNameRequest): Promise { + const data = QueryModuleAccountByNameRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.auth.v1beta1.Query", "ModuleAccountByName", data); + return promise.then((data) => QueryModuleAccountByNameResponse.decode(new _m0.Reader(data))); + } + + Bech32Prefix(request: Bech32PrefixRequest): Promise { + const data = Bech32PrefixRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.auth.v1beta1.Query", "Bech32Prefix", data); + return promise.then((data) => Bech32PrefixResponse.decode(new _m0.Reader(data))); + } + + AddressBytesToString(request: AddressBytesToStringRequest): Promise { + const data = AddressBytesToStringRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.auth.v1beta1.Query", "AddressBytesToString", data); + return 
promise.then((data) => AddressBytesToStringResponse.decode(new _m0.Reader(data))); + } + + AddressStringToBytes(request: AddressStringToBytesRequest): Promise { + const data = AddressStringToBytesRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.auth.v1beta1.Query", "AddressStringToBytes", data); + return promise.then((data) => AddressStringToBytesResponse.decode(new _m0.Reader(data))); + } + + AccountInfo(request: QueryAccountInfoRequest): Promise { + const data = QueryAccountInfoRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.auth.v1beta1.Query", "AccountInfo", data); + return promise.then((data) => QueryAccountInfoResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type 
DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.auth.v1beta1/types/cosmos/auth/v1beta1/tx.ts b/ts-client/cosmos.auth.v1beta1/types/cosmos/auth/v1beta1/tx.ts new file mode 100644 index 000000000..c5681b210 --- /dev/null +++ b/ts-client/cosmos.auth.v1beta1/types/cosmos/auth/v1beta1/tx.ts @@ -0,0 +1,172 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Params } from "./auth"; + +export const protobufPackage = "cosmos.auth.v1beta1"; + +/** + * MsgUpdateParams is the Msg/UpdateParams request type. + * + * Since: cosmos-sdk 0.47 + */ +export interface MsgUpdateParams { + /** authority is the address that controls the module (defaults to x/gov unless overwritten). */ + authority: string; + /** + * params defines the x/auth parameters to update. + * + * NOTE: All parameters must be supplied. + */ + params: Params | undefined; +} + +/** + * MsgUpdateParamsResponse defines the response structure for executing a + * MsgUpdateParams message. 
+ * + * Since: cosmos-sdk 0.47 + */ +export interface MsgUpdateParamsResponse { +} + +function createBaseMsgUpdateParams(): MsgUpdateParams { + return { authority: "", params: undefined }; +} + +export const MsgUpdateParams = { + encode(message: MsgUpdateParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateParams(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + case 2: + message.params = Params.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgUpdateParams { + return { + authority: isSet(object.authority) ? String(object.authority) : "", + params: isSet(object.params) ? Params.fromJSON(object.params) : undefined, + }; + }, + + toJSON(message: MsgUpdateParams): unknown { + const obj: any = {}; + message.authority !== undefined && (obj.authority = message.authority); + message.params !== undefined && (obj.params = message.params ? Params.toJSON(message.params) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): MsgUpdateParams { + const message = createBaseMsgUpdateParams(); + message.authority = object.authority ?? ""; + message.params = (object.params !== undefined && object.params !== null) + ? 
Params.fromPartial(object.params) + : undefined; + return message; + }, +}; + +function createBaseMsgUpdateParamsResponse(): MsgUpdateParamsResponse { + return {}; +} + +export const MsgUpdateParamsResponse = { + encode(_: MsgUpdateParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateParamsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgUpdateParamsResponse { + return {}; + }, + + toJSON(_: MsgUpdateParamsResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgUpdateParamsResponse { + const message = createBaseMsgUpdateParamsResponse(); + return message; + }, +}; + +/** Msg defines the x/auth Msg service. */ +export interface Msg { + /** + * UpdateParams defines a (governance) operation for updating the x/auth module + * parameters. The authority defaults to the x/gov module account. 
+ * + * Since: cosmos-sdk 0.47 + */ + UpdateParams(request: MsgUpdateParams): Promise; +} + +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.UpdateParams = this.UpdateParams.bind(this); + } + UpdateParams(request: MsgUpdateParams): Promise { + const data = MsgUpdateParams.encode(request).finish(); + const promise = this.rpc.request("cosmos.auth.v1beta1.Msg", "UpdateParams", data); + return promise.then((data) => MsgUpdateParamsResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.auth.v1beta1/types/cosmos/base/query/v1beta1/pagination.ts b/ts-client/cosmos.auth.v1beta1/types/cosmos/base/query/v1beta1/pagination.ts new file mode 100644 index 000000000..a31ca249d --- /dev/null +++ b/ts-client/cosmos.auth.v1beta1/types/cosmos/base/query/v1beta1/pagination.ts @@ -0,0 +1,286 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos.base.query.v1beta1"; + +/** + * PageRequest is to be embedded in gRPC request messages for efficient + * pagination. Ex: + * + * message SomeRequest { + * Foo some_parameter = 1; + * PageRequest pagination = 2; + * } + */ +export interface PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. 
Only one of offset or key + * should be set. + */ + key: Uint8Array; + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. + */ + offset: number; + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. + */ + limit: number; + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. + */ + countTotal: boolean; + /** + * reverse is set to true if results are to be returned in the descending order. + * + * Since: cosmos-sdk 0.43 + */ + reverse: boolean; +} + +/** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ +export interface PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. 
+ */ + nextKey: Uint8Array; + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + */ + total: number; +} + +function createBasePageRequest(): PageRequest { + return { key: new Uint8Array(), offset: 0, limit: 0, countTotal: false, reverse: false }; +} + +export const PageRequest = { + encode(message: PageRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + if (message.offset !== 0) { + writer.uint32(16).uint64(message.offset); + } + if (message.limit !== 0) { + writer.uint32(24).uint64(message.limit); + } + if (message.countTotal === true) { + writer.uint32(32).bool(message.countTotal); + } + if (message.reverse === true) { + writer.uint32(40).bool(message.reverse); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PageRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePageRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + case 2: + message.offset = longToNumber(reader.uint64() as Long); + break; + case 3: + message.limit = longToNumber(reader.uint64() as Long); + break; + case 4: + message.countTotal = reader.bool(); + break; + case 5: + message.reverse = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PageRequest { + return { + key: isSet(object.key) ? bytesFromBase64(object.key) : new Uint8Array(), + offset: isSet(object.offset) ? Number(object.offset) : 0, + limit: isSet(object.limit) ? Number(object.limit) : 0, + countTotal: isSet(object.countTotal) ? Boolean(object.countTotal) : false, + reverse: isSet(object.reverse) ? 
Boolean(object.reverse) : false, + }; + }, + + toJSON(message: PageRequest): unknown { + const obj: any = {}; + message.key !== undefined + && (obj.key = base64FromBytes(message.key !== undefined ? message.key : new Uint8Array())); + message.offset !== undefined && (obj.offset = Math.round(message.offset)); + message.limit !== undefined && (obj.limit = Math.round(message.limit)); + message.countTotal !== undefined && (obj.countTotal = message.countTotal); + message.reverse !== undefined && (obj.reverse = message.reverse); + return obj; + }, + + fromPartial, I>>(object: I): PageRequest { + const message = createBasePageRequest(); + message.key = object.key ?? new Uint8Array(); + message.offset = object.offset ?? 0; + message.limit = object.limit ?? 0; + message.countTotal = object.countTotal ?? false; + message.reverse = object.reverse ?? false; + return message; + }, +}; + +function createBasePageResponse(): PageResponse { + return { nextKey: new Uint8Array(), total: 0 }; +} + +export const PageResponse = { + encode(message: PageResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.nextKey.length !== 0) { + writer.uint32(10).bytes(message.nextKey); + } + if (message.total !== 0) { + writer.uint32(16).uint64(message.total); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PageResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePageResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.nextKey = reader.bytes(); + break; + case 2: + message.total = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PageResponse { + return { + nextKey: isSet(object.nextKey) ? 
bytesFromBase64(object.nextKey) : new Uint8Array(), + total: isSet(object.total) ? Number(object.total) : 0, + }; + }, + + toJSON(message: PageResponse): unknown { + const obj: any = {}; + message.nextKey !== undefined + && (obj.nextKey = base64FromBytes(message.nextKey !== undefined ? message.nextKey : new Uint8Array())); + message.total !== undefined && (obj.total = Math.round(message.total)); + return obj; + }, + + fromPartial, I>>(object: I): PageResponse { + const message = createBasePageResponse(); + message.nextKey = object.nextKey ?? new Uint8Array(); + message.total = object.total ?? 0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? 
{ [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.auth.v1beta1/types/cosmos/msg/v1/msg.ts b/ts-client/cosmos.auth.v1beta1/types/cosmos/msg/v1/msg.ts new file mode 100644 index 000000000..f0ff44eb2 --- /dev/null +++ b/ts-client/cosmos.auth.v1beta1/types/cosmos/msg/v1/msg.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "cosmos.msg.v1"; diff --git a/ts-client/cosmos.auth.v1beta1/types/cosmos/query/v1/query.ts b/ts-client/cosmos.auth.v1beta1/types/cosmos/query/v1/query.ts new file mode 100644 index 000000000..f82c51385 --- /dev/null +++ b/ts-client/cosmos.auth.v1beta1/types/cosmos/query/v1/query.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "cosmos.query.v1"; diff --git a/ts-client/cosmos.auth.v1beta1/types/cosmos_proto/cosmos.ts b/ts-client/cosmos.auth.v1beta1/types/cosmos_proto/cosmos.ts new file mode 100644 index 000000000..79c8d3486 --- /dev/null +++ b/ts-client/cosmos.auth.v1beta1/types/cosmos_proto/cosmos.ts @@ -0,0 +1,247 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos_proto"; + +export enum ScalarType { + SCALAR_TYPE_UNSPECIFIED = 0, + SCALAR_TYPE_STRING = 1, + SCALAR_TYPE_BYTES = 2, + UNRECOGNIZED = -1, +} + +export function scalarTypeFromJSON(object: any): ScalarType { + switch (object) { + case 0: + case "SCALAR_TYPE_UNSPECIFIED": + return ScalarType.SCALAR_TYPE_UNSPECIFIED; + case 1: + case 
"SCALAR_TYPE_STRING": + return ScalarType.SCALAR_TYPE_STRING; + case 2: + case "SCALAR_TYPE_BYTES": + return ScalarType.SCALAR_TYPE_BYTES; + case -1: + case "UNRECOGNIZED": + default: + return ScalarType.UNRECOGNIZED; + } +} + +export function scalarTypeToJSON(object: ScalarType): string { + switch (object) { + case ScalarType.SCALAR_TYPE_UNSPECIFIED: + return "SCALAR_TYPE_UNSPECIFIED"; + case ScalarType.SCALAR_TYPE_STRING: + return "SCALAR_TYPE_STRING"; + case ScalarType.SCALAR_TYPE_BYTES: + return "SCALAR_TYPE_BYTES"; + case ScalarType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * InterfaceDescriptor describes an interface type to be used with + * accepts_interface and implements_interface and declared by declare_interface. + */ +export interface InterfaceDescriptor { + /** + * name is the name of the interface. It should be a short-name (without + * a period) such that the fully qualified name of the interface will be + * package.name, ex. for the package a.b and interface named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the interface and its + * purpose. + */ + description: string; +} + +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. + * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. + * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ +export interface ScalarDescriptor { + /** + * name is the name of the scalar. It should be a short-name (without + * a period) such that the fully qualified name of the scalar will be + * package.name, ex. for the package a.b and scalar named C, the + * fully-qualified name will be a.b.C. 
+ */ + name: string; + /** + * description is a human-readable description of the scalar and its + * encoding format. For instance a big integer or decimal scalar should + * specify precisely the expected encoding format. + */ + description: string; + /** + * field_type is the type of field with which this scalar can be used. + * Scalars can be used with one and only one type of field so that + * encoding standards and simple and clear. Currently only string and + * bytes fields are supported for scalars. + */ + fieldType: ScalarType[]; +} + +function createBaseInterfaceDescriptor(): InterfaceDescriptor { + return { name: "", description: "" }; +} + +export const InterfaceDescriptor = { + encode(message: InterfaceDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseInterfaceDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): InterfaceDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? 
String(object.description) : "", + }; + }, + + toJSON(message: InterfaceDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + return obj; + }, + + fromPartial, I>>(object: I): InterfaceDescriptor { + const message = createBaseInterfaceDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? ""; + return message; + }, +}; + +function createBaseScalarDescriptor(): ScalarDescriptor { + return { name: "", description: "", fieldType: [] }; +} + +export const ScalarDescriptor = { + encode(message: ScalarDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + writer.uint32(26).fork(); + for (const v of message.fieldType) { + writer.int32(v); + } + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ScalarDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseScalarDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.fieldType.push(reader.int32() as any); + } + } else { + message.fieldType.push(reader.int32() as any); + } + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ScalarDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? 
String(object.description) : "", + fieldType: Array.isArray(object?.fieldType) ? object.fieldType.map((e: any) => scalarTypeFromJSON(e)) : [], + }; + }, + + toJSON(message: ScalarDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + if (message.fieldType) { + obj.fieldType = message.fieldType.map((e) => scalarTypeToJSON(e)); + } else { + obj.fieldType = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ScalarDescriptor { + const message = createBaseScalarDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? ""; + message.fieldType = object.fieldType?.map((e) => e) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.auth.v1beta1/types/gogoproto/gogo.ts b/ts-client/cosmos.auth.v1beta1/types/gogoproto/gogo.ts new file mode 100644 index 000000000..3f41a047a --- /dev/null +++ b/ts-client/cosmos.auth.v1beta1/types/gogoproto/gogo.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "gogoproto"; diff --git a/ts-client/cosmos.auth.v1beta1/types/google/api/annotations.ts b/ts-client/cosmos.auth.v1beta1/types/google/api/annotations.ts new file mode 100644 index 000000000..aace4787f --- /dev/null +++ b/ts-client/cosmos.auth.v1beta1/types/google/api/annotations.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "google.api"; diff --git a/ts-client/cosmos.auth.v1beta1/types/google/api/http.ts b/ts-client/cosmos.auth.v1beta1/types/google/api/http.ts new file mode 100644 index 000000000..17e770d15 --- /dev/null +++ b/ts-client/cosmos.auth.v1beta1/types/google/api/http.ts @@ -0,0 +1,589 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.api"; + +/** + * Defines the HTTP configuration for an API service. It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface Http { + /** + * A list of HTTP configuration rules that apply to individual API methods. + * + * **NOTE:** All service configuration rules follow "last one wins" order. + */ + rules: HttpRule[]; + /** + * When set to true, URL path parmeters will be fully URI-decoded except in + * cases of single segment matches in reserved expansion, where "%2F" will be + * left encoded. + * + * The default behavior is to not decode RFC 6570 reserved characters in multi + * segment matches. 
+ */ + fullyDecodeReservedExpansion: boolean; +} + +/** + * `HttpRule` defines the mapping of an RPC method to one or more HTTP + * REST API methods. The mapping specifies how different portions of the RPC + * request message are mapped to URL path, URL query parameters, and + * HTTP request body. The mapping is typically specified as an + * `google.api.http` annotation on the RPC method, + * see "google/api/annotations.proto" for details. + * + * The mapping consists of a field specifying the path template and + * method kind. The path template can refer to fields in the request + * message, as in the example below which describes a REST GET + * operation on a resource collection of messages: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}/{sub.subfield}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * SubMessage sub = 2; // `sub.subfield` is url-mapped + * } + * message Message { + * string text = 1; // content of the resource + * } + * + * The same http annotation can alternatively be expressed inside the + * `GRPC API Configuration` YAML file. + * + * http: + * rules: + * - selector: .Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * This definition enables an automatic, bidrectional mapping of HTTP + * JSON to RPC. Example: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456/foo` | `GetMessage(message_id: "123456" sub: SubMessage(subfield: "foo"))` + * + * In general, not only fields but also field paths can be referenced + * from a path pattern. Fields mapped to the path pattern cannot be + * repeated and must have a primitive (non-message) type. + * + * Any fields in the request message which are not bound by the path + * pattern automatically become (optional) HTTP query + * parameters. 
Assume the following definition of the request message: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * int64 revision = 2; // becomes a parameter + * SubMessage sub = 3; // `sub.subfield` becomes a parameter + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456?revision=2&sub.subfield=foo` | `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: "foo"))` + * + * Note that fields which are mapped to HTTP parameters must have a + * primitive type or a repeated primitive type. Message types are not + * allowed. In the case of a repeated type, the parameter can be + * repeated in the URL, as in `...?param=A¶m=B`. + * + * For HTTP method kinds which allow a request body, the `body` field + * specifies the mapping. Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. 
This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * The following HTTP JSON to RPC mapping is enabled: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice of + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC + * mappings: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` + * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: "123456")` + * + * # Rules for HTTP mapping + * + * The rules for mapping HTTP path, query parameters, and body fields + * to the request message are as follows: + * + * 1. The `body` field specifies either `*` or a field path, or is + * omitted. If omitted, it indicates there is no HTTP request body. + * 2. 
Leaf fields (recursive expansion of nested messages in the + * request) can be classified into three types: + * (a) Matched in the URL template. + * (b) Covered by body (if body is `*`, everything except (a) fields; + * else everything under the body field) + * (c) All other fields. + * 3. URL query parameters found in the HTTP request are mapped to (c) fields. + * 4. Any body sent with an HTTP request can contain only (b) fields. + * + * The syntax of the path template is as follows: + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single path segment. The syntax `**` matches zero + * or more path segments, which must be the last part of the path except the + * `Verb`. The syntax `LITERAL` matches literal text in the path. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path, all characters + * except `[-_.~0-9a-zA-Z]` are percent-encoded. Such variables show up in the + * Discovery Document as `{var}`. + * + * If a variable contains one or more path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path, all + * characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. Such variables + * show up in the Discovery Document as `{+var}`. 
+ * + * NOTE: While the single segment variable matches the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 + * Simple String Expansion, the multi segment variable **does not** match + * RFC 6570 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. + * + * NOTE: the field paths in variables and in the `body` must not refer to + * repeated fields or map fields. + */ +export interface HttpRule { + /** + * Selects methods to which this rule applies. + * + * Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + */ + selector: string; + /** Used for listing and getting information about resources. */ + get: + | string + | undefined; + /** Used for updating a resource. */ + put: + | string + | undefined; + /** Used for creating a resource. */ + post: + | string + | undefined; + /** Used for deleting a resource. */ + delete: + | string + | undefined; + /** Used for updating a resource. */ + patch: + | string + | undefined; + /** + * The custom pattern is used for specifying an HTTP method that is not + * included in the `pattern` field, such as HEAD, or "*" to leave the + * HTTP method unspecified for this rule. The wild-card rule is useful + * for services that provide content to Web (HTML) clients. + */ + custom: + | CustomHttpPattern + | undefined; + /** + * The name of the request field whose value is mapped to the HTTP body, or + * `*` for mapping all fields not captured by the path pattern to the HTTP + * body. NOTE: the referred field must not be a repeated field and must be + * present at the top-level of request message type. + */ + body: string; + /** + * Optional. The name of the response field whose value is mapped to the HTTP + * body of response. Other response fields are ignored. When + * not set, the response message will be used as HTTP body of response. 
+ */ + responseBody: string; + /** + * Additional HTTP bindings for the selector. Nested bindings must + * not contain an `additional_bindings` field themselves (that is, + * the nesting may only be one level deep). + */ + additionalBindings: HttpRule[]; +} + +/** A custom pattern is used for defining custom HTTP verb. */ +export interface CustomHttpPattern { + /** The name of this custom HTTP verb. */ + kind: string; + /** The path matched by this custom verb. */ + path: string; +} + +function createBaseHttp(): Http { + return { rules: [], fullyDecodeReservedExpansion: false }; +} + +export const Http = { + encode(message: Http, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rules) { + HttpRule.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.fullyDecodeReservedExpansion === true) { + writer.uint32(16).bool(message.fullyDecodeReservedExpansion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Http { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rules.push(HttpRule.decode(reader, reader.uint32())); + break; + case 2: + message.fullyDecodeReservedExpansion = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Http { + return { + rules: Array.isArray(object?.rules) ? object.rules.map((e: any) => HttpRule.fromJSON(e)) : [], + fullyDecodeReservedExpansion: isSet(object.fullyDecodeReservedExpansion) + ? Boolean(object.fullyDecodeReservedExpansion) + : false, + }; + }, + + toJSON(message: Http): unknown { + const obj: any = {}; + if (message.rules) { + obj.rules = message.rules.map((e) => e ? 
HttpRule.toJSON(e) : undefined); + } else { + obj.rules = []; + } + message.fullyDecodeReservedExpansion !== undefined + && (obj.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion); + return obj; + }, + + fromPartial, I>>(object: I): Http { + const message = createBaseHttp(); + message.rules = object.rules?.map((e) => HttpRule.fromPartial(e)) || []; + message.fullyDecodeReservedExpansion = object.fullyDecodeReservedExpansion ?? false; + return message; + }, +}; + +function createBaseHttpRule(): HttpRule { + return { + selector: "", + get: undefined, + put: undefined, + post: undefined, + delete: undefined, + patch: undefined, + custom: undefined, + body: "", + responseBody: "", + additionalBindings: [], + }; +} + +export const HttpRule = { + encode(message: HttpRule, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.selector !== "") { + writer.uint32(10).string(message.selector); + } + if (message.get !== undefined) { + writer.uint32(18).string(message.get); + } + if (message.put !== undefined) { + writer.uint32(26).string(message.put); + } + if (message.post !== undefined) { + writer.uint32(34).string(message.post); + } + if (message.delete !== undefined) { + writer.uint32(42).string(message.delete); + } + if (message.patch !== undefined) { + writer.uint32(50).string(message.patch); + } + if (message.custom !== undefined) { + CustomHttpPattern.encode(message.custom, writer.uint32(66).fork()).ldelim(); + } + if (message.body !== "") { + writer.uint32(58).string(message.body); + } + if (message.responseBody !== "") { + writer.uint32(98).string(message.responseBody); + } + for (const v of message.additionalBindings) { + HttpRule.encode(v!, writer.uint32(90).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): HttpRule { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseHttpRule(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.selector = reader.string(); + break; + case 2: + message.get = reader.string(); + break; + case 3: + message.put = reader.string(); + break; + case 4: + message.post = reader.string(); + break; + case 5: + message.delete = reader.string(); + break; + case 6: + message.patch = reader.string(); + break; + case 8: + message.custom = CustomHttpPattern.decode(reader, reader.uint32()); + break; + case 7: + message.body = reader.string(); + break; + case 12: + message.responseBody = reader.string(); + break; + case 11: + message.additionalBindings.push(HttpRule.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): HttpRule { + return { + selector: isSet(object.selector) ? String(object.selector) : "", + get: isSet(object.get) ? String(object.get) : undefined, + put: isSet(object.put) ? String(object.put) : undefined, + post: isSet(object.post) ? String(object.post) : undefined, + delete: isSet(object.delete) ? String(object.delete) : undefined, + patch: isSet(object.patch) ? String(object.patch) : undefined, + custom: isSet(object.custom) ? CustomHttpPattern.fromJSON(object.custom) : undefined, + body: isSet(object.body) ? String(object.body) : "", + responseBody: isSet(object.responseBody) ? String(object.responseBody) : "", + additionalBindings: Array.isArray(object?.additionalBindings) + ? 
object.additionalBindings.map((e: any) => HttpRule.fromJSON(e)) + : [], + }; + }, + + toJSON(message: HttpRule): unknown { + const obj: any = {}; + message.selector !== undefined && (obj.selector = message.selector); + message.get !== undefined && (obj.get = message.get); + message.put !== undefined && (obj.put = message.put); + message.post !== undefined && (obj.post = message.post); + message.delete !== undefined && (obj.delete = message.delete); + message.patch !== undefined && (obj.patch = message.patch); + message.custom !== undefined + && (obj.custom = message.custom ? CustomHttpPattern.toJSON(message.custom) : undefined); + message.body !== undefined && (obj.body = message.body); + message.responseBody !== undefined && (obj.responseBody = message.responseBody); + if (message.additionalBindings) { + obj.additionalBindings = message.additionalBindings.map((e) => e ? HttpRule.toJSON(e) : undefined); + } else { + obj.additionalBindings = []; + } + return obj; + }, + + fromPartial, I>>(object: I): HttpRule { + const message = createBaseHttpRule(); + message.selector = object.selector ?? ""; + message.get = object.get ?? undefined; + message.put = object.put ?? undefined; + message.post = object.post ?? undefined; + message.delete = object.delete ?? undefined; + message.patch = object.patch ?? undefined; + message.custom = (object.custom !== undefined && object.custom !== null) + ? CustomHttpPattern.fromPartial(object.custom) + : undefined; + message.body = object.body ?? ""; + message.responseBody = object.responseBody ?? 
""; + message.additionalBindings = object.additionalBindings?.map((e) => HttpRule.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseCustomHttpPattern(): CustomHttpPattern { + return { kind: "", path: "" }; +} + +export const CustomHttpPattern = { + encode(message: CustomHttpPattern, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.kind !== "") { + writer.uint32(10).string(message.kind); + } + if (message.path !== "") { + writer.uint32(18).string(message.path); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CustomHttpPattern { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCustomHttpPattern(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.kind = reader.string(); + break; + case 2: + message.path = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): CustomHttpPattern { + return { kind: isSet(object.kind) ? String(object.kind) : "", path: isSet(object.path) ? String(object.path) : "" }; + }, + + toJSON(message: CustomHttpPattern): unknown { + const obj: any = {}; + message.kind !== undefined && (obj.kind = message.kind); + message.path !== undefined && (obj.path = message.path); + return obj; + }, + + fromPartial, I>>(object: I): CustomHttpPattern { + const message = createBaseCustomHttpPattern(); + message.kind = object.kind ?? ""; + message.path = object.path ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? 
keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.auth.v1beta1/types/google/protobuf/any.ts b/ts-client/cosmos.auth.v1beta1/types/google/protobuf/any.ts new file mode 100644 index 000000000..c4ebf38be --- /dev/null +++ b/ts-client/cosmos.auth.v1beta1/types/google/protobuf/any.ts @@ -0,0 +1,240 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * `Any` contains an arbitrary serialized protocol buffer message along with a + * URL that describes the type of the serialized message. + * + * Protobuf library provides support to pack/unpack Any values in the form + * of utility functions or additional generated methods of the Any type. + * + * Example 1: Pack and unpack a message in C++. + * + * Foo foo = ...; + * Any any; + * any.PackFrom(foo); + * ... + * if (any.UnpackTo(&foo)) { + * ... + * } + * + * Example 2: Pack and unpack a message in Java. + * + * Foo foo = ...; + * Any any = Any.pack(foo); + * ... + * if (any.is(Foo.class)) { + * foo = any.unpack(Foo.class); + * } + * + * Example 3: Pack and unpack a message in Python. + * + * foo = Foo(...) + * any = Any() + * any.Pack(foo) + * ... + * if any.Is(Foo.DESCRIPTOR): + * any.Unpack(foo) + * ... + * + * Example 4: Pack and unpack a message in Go + * + * foo := &pb.Foo{...} + * any, err := anypb.New(foo) + * if err != nil { + * ... + * } + * ... + * foo := &pb.Foo{} + * if err := any.UnmarshalTo(foo); err != nil { + * ... + * } + * + * The pack methods provided by protobuf library will by default use + * 'type.googleapis.com/full.type.name' as the type URL and the unpack + * methods only use the fully qualified type name after the last '/' + * in the type URL, for example "foo.bar.com/x/y.z" will yield type + * name "y.z". 
+ * + * JSON + * ==== + * The JSON representation of an `Any` value uses the regular + * representation of the deserialized, embedded message, with an + * additional field `@type` which contains the type URL. Example: + * + * package google.profile; + * message Person { + * string first_name = 1; + * string last_name = 2; + * } + * + * { + * "@type": "type.googleapis.com/google.profile.Person", + * "firstName": , + * "lastName": + * } + * + * If the embedded message type is well-known and has a custom JSON + * representation, that representation will be embedded adding a field + * `value` which holds the custom JSON in addition to the `@type` + * field. Example (for message [google.protobuf.Duration][]): + * + * { + * "@type": "type.googleapis.com/google.protobuf.Duration", + * "value": "1.212s" + * } + */ +export interface Any { + /** + * A URL/resource name that uniquely identifies the type of the serialized + * protocol buffer message. This string must contain at least + * one "/" character. The last segment of the URL's path must represent + * the fully qualified name of the type (as in + * `path/google.protobuf.Duration`). The name should be in a canonical form + * (e.g., leading "." is not accepted). + * + * In practice, teams usually precompile into the binary all types that they + * expect it to use in the context of Any. However, for URLs which use the + * scheme `http`, `https`, or no scheme, one can optionally set up a type + * server that maps type URLs to message definitions as follows: + * + * * If no scheme is provided, `https` is assumed. + * * An HTTP GET on the URL must yield a [google.protobuf.Type][] + * value in binary format, or produce an error. + * * Applications are allowed to cache lookup results based on the + * URL, or have them precompiled into a binary to avoid any + * lookup. Therefore, binary compatibility needs to be preserved + * on changes to types. (Use versioned type names to manage + * breaking changes.) 
+ * + * Note: this functionality is not currently available in the official + * protobuf release, and it is not used for type URLs beginning with + * type.googleapis.com. + * + * Schemes other than `http`, `https` (or the empty scheme) might be + * used with implementation specific semantics. + */ + typeUrl: string; + /** Must be a valid serialized protocol buffer of the above specified type. */ + value: Uint8Array; +} + +function createBaseAny(): Any { + return { typeUrl: "", value: new Uint8Array() }; +} + +export const Any = { + encode(message: Any, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.typeUrl !== "") { + writer.uint32(10).string(message.typeUrl); + } + if (message.value.length !== 0) { + writer.uint32(18).bytes(message.value); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Any { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAny(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.typeUrl = reader.string(); + break; + case 2: + message.value = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Any { + return { + typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : "", + value: isSet(object.value) ? bytesFromBase64(object.value) : new Uint8Array(), + }; + }, + + toJSON(message: Any): unknown { + const obj: any = {}; + message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl); + message.value !== undefined + && (obj.value = base64FromBytes(message.value !== undefined ? message.value : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): Any { + const message = createBaseAny(); + message.typeUrl = object.typeUrl ?? ""; + message.value = object.value ?? 
new Uint8Array(); + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.auth.v1beta1/types/google/protobuf/descriptor.ts b/ts-client/cosmos.auth.v1beta1/types/google/protobuf/descriptor.ts new file mode 100644 index 000000000..8fb7bb24c --- /dev/null +++ b/ts-client/cosmos.auth.v1beta1/types/google/protobuf/descriptor.ts @@ -0,0 +1,3753 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSet { + file: FileDescriptorProto[]; +} + +/** Describes a complete .proto file. */ +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name: string; + /** e.g. "foo", "foo.bar", etc. */ + package: string; + /** Names of files imported by this file. */ + dependency: string[]; + /** Indexes of the public imported files in the dependency list above. */ + publicDependency: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weakDependency: number[]; + /** All top-level definitions in this file. */ + messageType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + service: ServiceDescriptorProto[]; + extension: FieldDescriptorProto[]; + options: + | FileOptions + | undefined; + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. + */ + sourceCodeInfo: + | SourceCodeInfo + | undefined; + /** + * The syntax of the proto file. + * The supported values are "proto2" and "proto3". 
+ */ + syntax: string; +} + +/** Describes a message type. */ +export interface DescriptorProto { + name: string; + field: FieldDescriptorProto[]; + extension: FieldDescriptorProto[]; + nestedType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + extensionRange: DescriptorProto_ExtensionRange[]; + oneofDecl: OneofDescriptorProto[]; + options: MessageOptions | undefined; + reservedRange: DescriptorProto_ReservedRange[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reservedName: string[]; +} + +export interface DescriptorProto_ExtensionRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; + options: ExtensionRangeOptions | undefined; +} + +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; +} + +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Describes a field within a message. */ +export interface FieldDescriptorProto { + name: string; + number: number; + label: FieldDescriptorProto_Label; + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type: FieldDescriptorProto_Type; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). + */ + typeName: string; + /** + * For extensions, this is the name of the type being extended. 
It is + * resolved in the same manner as type_name. + */ + extendee: string; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + * TODO(kenton): Base-64 encode? + */ + defaultValue: string; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneofIndex: number; + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + jsonName: string; + options: + | FieldOptions + | undefined; + /** + * If true, this is a proto3 "optional". When a proto3 field is optional, it + * tracks presence regardless of field type. + * + * When proto3_optional is true, this field must be belong to a oneof to + * signal to old proto3 clients that presence is tracked for this field. This + * oneof is known as a "synthetic" oneof, and this field must be its sole + * member (each proto3 optional field gets its own synthetic oneof). Synthetic + * oneofs exist in the descriptor only, and do not generate any API. Synthetic + * oneofs must be ordered after all "real" oneofs. + * + * For message fields, proto3_optional doesn't create any semantic change, + * since non-repeated message fields always track presence. However it still + * indicates the semantic detail of whether the user wrote "optional" or not. + * This can be useful for round-tripping the .proto file. For consistency we + * give message fields a synthetic oneof also, even though it is not required + * to track presence. 
This is especially important because the parser can't + * tell if a field is a message or an enum, so it must always create a + * synthetic oneof. + * + * Proto2 optional fields do not set this flag, because they already indicate + * optional with `LABEL_OPTIONAL`. + */ + proto3Optional: boolean; +} + +export enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + TYPE_GROUP = 10, + /** TYPE_MESSAGE - Length-delimited aggregate. */ + TYPE_MESSAGE = 11, + /** TYPE_BYTES - New in version 2. */ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + /** TYPE_SINT64 - Uses ZigZag encoding. 
*/ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + return FieldDescriptorProto_Type.TYPE_SINT64; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Type.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case 
FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + case FieldDescriptorProto_Type.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Label.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string { + switch (object) { + case 
FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + case FieldDescriptorProto_Label.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** Describes a oneof. */ +export interface OneofDescriptorProto { + name: string; + options: OneofOptions | undefined; +} + +/** Describes an enum type. */ +export interface EnumDescriptorProto { + name: string; + value: EnumValueDescriptorProto[]; + options: + | EnumOptions + | undefined; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reservedRange: EnumDescriptorProto_EnumReservedRange[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reservedName: string[]; +} + +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start: number; + /** Inclusive. */ + end: number; +} + +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProto { + name: string; + number: number; + options: EnumValueOptions | undefined; +} + +/** Describes a service. */ +export interface ServiceDescriptorProto { + name: string; + method: MethodDescriptorProto[]; + options: ServiceOptions | undefined; +} + +/** Describes a method of a service. */ +export interface MethodDescriptorProto { + name: string; + /** + * Input and output type names. 
These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + inputType: string; + outputType: string; + options: + | MethodOptions + | undefined; + /** Identifies if client streams multiple client messages */ + clientStreaming: boolean; + /** Identifies if server streams multiple server messages */ + serverStreaming: boolean; +} + +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage: string; + /** + * Controls the name of the wrapper Java class generated for the .proto file. + * That class will always contain the .proto file's getDescriptor() method as + * well as any top-level extensions defined in the .proto file. + * If java_multiple_files is disabled, then all the other classes from the + * .proto file will be nested inside the single wrapper outer class. + */ + javaOuterClassname: string; + /** + * If enabled, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the wrapper class + * named by java_outer_classname. However, the wrapper class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + javaMultipleFiles: boolean; + /** + * This option does nothing. + * + * @deprecated + */ + javaGenerateEqualsAndHash: boolean; + /** + * If set true, then the Java2 code generator will generate code that + * throws an exception whenever an attempt is made to assign a non-UTF-8 + * byte sequence to a string field. + * Message reflection will do the same. + * However, an extension field still accepts non-UTF-8 byte sequences. 
+ * This option has no effect on when used with the lite runtime. + */ + javaStringCheckUtf8: boolean; + optimizeFor: FileOptions_OptimizeMode; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + goPackage: string; + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + ccGenericServices: boolean; + javaGenericServices: boolean; + pyGenericServices: boolean; + phpGenericServices: boolean; + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated: boolean; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + ccEnableArenas: boolean; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. + */ + objcClassPrefix: string; + /** Namespace for generated classes; defaults to the package. 
*/ + csharpNamespace: string; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swiftPrefix: string; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + phpClassPrefix: string; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. When this option is empty, the package name will be used for + * determining the namespace. + */ + phpNamespace: string; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + phpMetadataNamespace: string; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + rubyPackage: string; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Generated classes can be optimized for speed or code size. */ +export enum FileOptions_OptimizeMode { + /** SPEED - Generate complete code for parsing, serialization, */ + SPEED = 1, + /** CODE_SIZE - etc. */ + CODE_SIZE = 2, + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. 
*/ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1, +} + +export function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + case -1: + case "UNRECOGNIZED": + default: + return FileOptions_OptimizeMode.UNRECOGNIZED; + } +} + +export function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + case FileOptions_OptimizeMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + messageSetWireFormat: boolean; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". 
+ */ + noStandardDescriptorAccessor: boolean; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated: boolean; + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + mapEntry: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface FieldOptions { + /** + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is not yet implemented in the open source + * release -- sorry, we'll try to include it in a future version! + */ + ctype: FieldOptions_CType; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. 
+ */ + packed: boolean; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. + * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype: FieldOptions_JSType; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * Note that implementations may choose not to check required fields within + * a lazy sub-message. That is, calling IsInitialized() on the outer message + * may return true even if the inner message has missing required fields. 
+ * This is necessary because otherwise the inner message would have to be + * parsed in order to perform the check, defeating the purpose of lazy + * parsing. An implementation which chooses not to check required fields + * must be consistent about it. That is, for any particular sub-message, the + * implementation must either *always* check its required fields, or *never* + * check its required fields, regardless of whether or not the message has + * been parsed. + */ + lazy: boolean; + /** + * Is this field deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated: boolean; + /** For Google-internal migration only. Do not use. */ + weak: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export enum FieldOptions_CType { + /** STRING - Default mode. */ + STRING = 0, + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_CType.UNRECOGNIZED; + } +} + +export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + case FieldOptions_CType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. */ + JS_NORMAL = 0, + /** JS_STRING - Use JavaScript strings. 
*/ + JS_STRING = 1, + /** JS_NUMBER - Use JavaScript numbers. */ + JS_NUMBER = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_JSType.UNRECOGNIZED; + } +} + +export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + case FieldOptions_JSType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface OneofOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias: boolean; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumValueOptions { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. 
*/ + uninterpretedOption: UninterpretedOption[]; +} + +export interface ServiceOptions { + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated: boolean; + idempotencyLevel: MethodOptions_IdempotencyLevel; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
+ */ +export enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1, +} + +export function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + case -1: + case "UNRECOGNIZED": + default: + return MethodOptions_IdempotencyLevel.UNRECOGNIZED; + } +} + +export function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + case MethodOptions_IdempotencyLevel.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. 
+ */ + identifierValue: string; + positiveIntValue: number; + negativeIntValue: number; + doubleValue: number; + stringValue: Uint8Array; + aggregateValue: string; +} + +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePart { + namePart: string; + isExtension: boolean; +} + +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. 
This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[]; +} + +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition. For + * example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). 
+ */ + path: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. + * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to qux. + * // + * // Another line attached to qux. + * optional double qux = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to qux or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. * / + * /* Block comment attached to + * * grault. 
* / + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leadingComments: string; + trailingComments: string; + leadingDetachedComments: string[]; +} + +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. + */ + annotation: GeneratedCodeInfo_Annotation[]; +} + +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[]; + /** Identifies the filesystem path to the original source .proto. */ + sourceFile: string; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin: number; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified offset. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end: number; +} + +function createBaseFileDescriptorSet(): FileDescriptorSet { + return { file: [] }; +} + +export const FileDescriptorSet = { + encode(message: FileDescriptorSet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.file) { + FileDescriptorProto.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorSet { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.file.push(FileDescriptorProto.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorSet { + return { file: Array.isArray(object?.file) ? object.file.map((e: any) => FileDescriptorProto.fromJSON(e)) : [] }; + }, + + toJSON(message: FileDescriptorSet): unknown { + const obj: any = {}; + if (message.file) { + obj.file = message.file.map((e) => e ? FileDescriptorProto.toJSON(e) : undefined); + } else { + obj.file = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorSet { + const message = createBaseFileDescriptorSet(); + message.file = object.file?.map((e) => FileDescriptorProto.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFileDescriptorProto(): FileDescriptorProto { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + }; +} + +export const FileDescriptorProto = { + encode(message: FileDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.package !== "") { + writer.uint32(18).string(message.package); + } + for (const v of message.dependency) { + writer.uint32(26).string(v!); + } + writer.uint32(82).fork(); + for (const v of message.publicDependency) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(90).fork(); + for (const v of message.weakDependency) { + writer.int32(v); + } + writer.ldelim(); + for (const v of message.messageType) { + DescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of 
message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const v of message.service) { + ServiceDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(58).fork()).ldelim(); + } + if (message.options !== undefined) { + FileOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.sourceCodeInfo !== undefined) { + SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(74).fork()).ldelim(); + } + if (message.syntax !== "") { + writer.uint32(98).string(message.syntax); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFileDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.package = reader.string(); + break; + case 3: + message.dependency.push(reader.string()); + break; + case 10: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.publicDependency.push(reader.int32()); + } + } else { + message.publicDependency.push(reader.int32()); + } + break; + case 11: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.weakDependency.push(reader.int32()); + } + } else { + message.weakDependency.push(reader.int32()); + } + break; + case 4: + message.messageType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.service.push(ServiceDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + 
message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 8: + message.options = FileOptions.decode(reader, reader.uint32()); + break; + case 9: + message.sourceCodeInfo = SourceCodeInfo.decode(reader, reader.uint32()); + break; + case 12: + message.syntax = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + package: isSet(object.package) ? String(object.package) : "", + dependency: Array.isArray(object?.dependency) ? object.dependency.map((e: any) => String(e)) : [], + publicDependency: Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e: any) => Number(e)) + : [], + weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e: any) => Number(e)) : [], + messageType: Array.isArray(object?.messageType) + ? object.messageType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + service: Array.isArray(object?.service) ? object.service.map((e: any) => ServiceDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) ? SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, + syntax: isSet(object.syntax) ? 
String(object.syntax) : "", + }; + }, + + toJSON(message: FileDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.package !== undefined && (obj.package = message.package); + if (message.dependency) { + obj.dependency = message.dependency.map((e) => e); + } else { + obj.dependency = []; + } + if (message.publicDependency) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } else { + obj.publicDependency = []; + } + if (message.weakDependency) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } else { + obj.weakDependency = []; + } + if (message.messageType) { + obj.messageType = message.messageType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.messageType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.service) { + obj.service = message.service.map((e) => e ? ServiceDescriptorProto.toJSON(e) : undefined); + } else { + obj.service = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + message.options !== undefined && (obj.options = message.options ? FileOptions.toJSON(message.options) : undefined); + message.sourceCodeInfo !== undefined + && (obj.sourceCodeInfo = message.sourceCodeInfo ? SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined); + message.syntax !== undefined && (obj.syntax = message.syntax); + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorProto { + const message = createBaseFileDescriptorProto(); + message.name = object.name ?? ""; + message.package = object.package ?? 
""; + message.dependency = object.dependency?.map((e) => e) || []; + message.publicDependency = object.publicDependency?.map((e) => e) || []; + message.weakDependency = object.weakDependency?.map((e) => e) || []; + message.messageType = object.messageType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.service = object.service?.map((e) => ServiceDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? FileOptions.fromPartial(object.options) + : undefined; + message.sourceCodeInfo = (object.sourceCodeInfo !== undefined && object.sourceCodeInfo !== null) + ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) + : undefined; + message.syntax = object.syntax ?? ""; + return message; + }, +}; + +function createBaseDescriptorProto(): DescriptorProto { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + }; +} + +export const DescriptorProto = { + encode(message: DescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.field) { + FieldDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.nestedType) { + DescriptorProto.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.extensionRange) { + DescriptorProto_ExtensionRange.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const 
v of message.oneofDecl) { + OneofDescriptorProto.encode(v!, writer.uint32(66).fork()).ldelim(); + } + if (message.options !== undefined) { + MessageOptions.encode(message.options, writer.uint32(58).fork()).ldelim(); + } + for (const v of message.reservedRange) { + DescriptorProto_ReservedRange.encode(v!, writer.uint32(74).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(82).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.field.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.nestedType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 4: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.extensionRange.push(DescriptorProto_ExtensionRange.decode(reader, reader.uint32())); + break; + case 8: + message.oneofDecl.push(OneofDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + message.options = MessageOptions.decode(reader, reader.uint32()); + break; + case 9: + message.reservedRange.push(DescriptorProto_ReservedRange.decode(reader, reader.uint32())); + break; + case 10: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + field: Array.isArray(object?.field) ? 
object.field.map((e: any) => FieldDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: Array.isArray(object?.nestedType) + ? object.nestedType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + extensionRange: Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e: any) => DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e: any) => OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? MessageOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: DescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.field) { + obj.field = message.field.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.field = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + if (message.nestedType) { + obj.nestedType = message.nestedType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.nestedType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.extensionRange) { + obj.extensionRange = message.extensionRange.map((e) => e ? 
DescriptorProto_ExtensionRange.toJSON(e) : undefined); + } else { + obj.extensionRange = []; + } + if (message.oneofDecl) { + obj.oneofDecl = message.oneofDecl.map((e) => e ? OneofDescriptorProto.toJSON(e) : undefined); + } else { + obj.oneofDecl = []; + } + message.options !== undefined + && (obj.options = message.options ? MessageOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => e ? DescriptorProto_ReservedRange.toJSON(e) : undefined); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): DescriptorProto { + const message = createBaseDescriptorProto(); + message.name = object.name ?? ""; + message.field = object.field?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.nestedType = object.nestedType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.extensionRange = object.extensionRange?.map((e) => DescriptorProto_ExtensionRange.fromPartial(e)) || []; + message.oneofDecl = object.oneofDecl?.map((e) => OneofDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
MessageOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => DescriptorProto_ReservedRange.fromPartial(e)) || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseDescriptorProto_ExtensionRange(): DescriptorProto_ExtensionRange { + return { start: 0, end: 0, options: undefined }; +} + +export const DescriptorProto_ExtensionRange = { + encode(message: DescriptorProto_ExtensionRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + if (message.options !== undefined) { + ExtensionRangeOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ExtensionRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ExtensionRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + case 3: + message.options = ExtensionRangeOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ExtensionRange { + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + options: isSet(object.options) ? 
ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: DescriptorProto_ExtensionRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + message.options !== undefined + && (obj.options = message.options ? ExtensionRangeOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ExtensionRange { + const message = createBaseDescriptorProto_ExtensionRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? ExtensionRangeOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseDescriptorProto_ReservedRange(): DescriptorProto_ReservedRange { + return { start: 0, end: 0 }; +} + +export const DescriptorProto_ReservedRange = { + encode(message: DescriptorProto_ReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? 
Number(object.end) : 0 }; + }, + + toJSON(message: DescriptorProto_ReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ReservedRange { + const message = createBaseDescriptorProto_ReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + return message; + }, +}; + +function createBaseExtensionRangeOptions(): ExtensionRangeOptions { + return { uninterpretedOption: [] }; +} + +export const ExtensionRangeOptions = { + encode(message: ExtensionRangeOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ExtensionRangeOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ExtensionRangeOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} + +export const FieldDescriptorProto = { + encode(message: FieldDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(24).int32(message.number); + } + if (message.label !== 1) { + writer.uint32(32).int32(message.label); + } + if (message.type !== 1) { + writer.uint32(40).int32(message.type); + } + if (message.typeName !== "") { + writer.uint32(50).string(message.typeName); + } + if (message.extendee !== "") { + writer.uint32(18).string(message.extendee); + } + if (message.defaultValue !== "") { + writer.uint32(58).string(message.defaultValue); + } + if (message.oneofIndex !== 0) { + writer.uint32(72).int32(message.oneofIndex); + } + if (message.jsonName !== "") { + writer.uint32(82).string(message.jsonName); + } + if (message.options !== undefined) { + FieldOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.proto3Optional === true) { + writer.uint32(136).bool(message.proto3Optional); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 3: + message.number = reader.int32(); + break; + case 4: + message.label = reader.int32() as any; + break; + case 5: + message.type = reader.int32() as any; + break; + case 6: + message.typeName = reader.string(); + break; + case 2: + message.extendee = reader.string(); + break; + case 7: + message.defaultValue = reader.string(); + break; + case 9: + message.oneofIndex = reader.int32(); + break; + case 10: + message.jsonName = reader.string(); + break; + case 8: + message.options = FieldOptions.decode(reader, reader.uint32()); + break; + case 17: + message.proto3Optional = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) ? String(object.typeName) : "", + extendee: isSet(object.extendee) ? String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "", + oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? String(object.jsonName) : "", + options: isSet(object.options) ? FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) ? 
Boolean(object.proto3Optional) : false, + }; + }, + + toJSON(message: FieldDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label)); + message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type)); + message.typeName !== undefined && (obj.typeName = message.typeName); + message.extendee !== undefined && (obj.extendee = message.extendee); + message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue); + message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex)); + message.jsonName !== undefined && (obj.jsonName = message.jsonName); + message.options !== undefined && (obj.options = message.options ? FieldOptions.toJSON(message.options) : undefined); + message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional); + return obj; + }, + + fromPartial, I>>(object: I): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.label = object.label ?? 1; + message.type = object.type ?? 1; + message.typeName = object.typeName ?? ""; + message.extendee = object.extendee ?? ""; + message.defaultValue = object.defaultValue ?? ""; + message.oneofIndex = object.oneofIndex ?? 0; + message.jsonName = object.jsonName ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? FieldOptions.fromPartial(object.options) + : undefined; + message.proto3Optional = object.proto3Optional ?? 
false; + return message; + }, +}; + +function createBaseOneofDescriptorProto(): OneofDescriptorProto { + return { name: "", options: undefined }; +} + +export const OneofDescriptorProto = { + encode(message: OneofDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.options !== undefined) { + OneofOptions.encode(message.options, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.options = OneofOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + options: isSet(object.options) ? OneofOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: OneofDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.options !== undefined && (obj.options = message.options ? OneofOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto(); + message.name = object.name ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? 
OneofOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseEnumDescriptorProto(): EnumDescriptorProto { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] }; +} + +export const EnumDescriptorProto = { + encode(message: EnumDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.value) { + EnumValueDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + EnumOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.reservedRange) { + EnumDescriptorProto_EnumReservedRange.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(42).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.value.push(EnumValueDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = EnumOptions.decode(reader, reader.uint32()); + break; + case 4: + message.reservedRange.push(EnumDescriptorProto_EnumReservedRange.decode(reader, reader.uint32())); + break; + case 5: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + value: Array.isArray(object?.value) ? 
object.value.map((e: any) => EnumValueDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? EnumOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: EnumDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.value) { + obj.value = message.value.map((e) => e ? EnumValueDescriptorProto.toJSON(e) : undefined); + } else { + obj.value = []; + } + message.options !== undefined && (obj.options = message.options ? EnumOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => + e ? EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined + ); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto(); + message.name = object.name ?? ""; + message.value = object.value?.map((e) => EnumValueDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
EnumOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => EnumDescriptorProto_EnumReservedRange.fromPartial(e)) + || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseEnumDescriptorProto_EnumReservedRange(): EnumDescriptorProto_EnumReservedRange { + return { start: 0, end: 0 }; +} + +export const EnumDescriptorProto_EnumReservedRange = { + encode(message: EnumDescriptorProto_EnumReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto_EnumReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto_EnumReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + + toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +function createBaseEnumValueDescriptorProto(): EnumValueDescriptorProto { + return { name: "", number: 0, options: undefined }; +} + +export const EnumValueDescriptorProto = { + encode(message: EnumValueDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(16).int32(message.number); + } + if (message.options !== undefined) { + EnumValueOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.number = reader.int32(); + break; + case 3: + message.options = EnumValueOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + options: isSet(object.options) ? EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: EnumValueDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.options !== undefined + && (obj.options = message.options ? 
EnumValueOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? EnumValueOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseServiceDescriptorProto(): ServiceDescriptorProto { + return { name: "", method: [], options: undefined }; +} + +export const ServiceDescriptorProto = { + encode(message: ServiceDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.method) { + MethodDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + ServiceOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.method.push(MethodDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = ServiceOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + method: Array.isArray(object?.method) ? object.method.map((e: any) => MethodDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? 
ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: ServiceDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.method) { + obj.method = message.method.map((e) => e ? MethodDescriptorProto.toJSON(e) : undefined); + } else { + obj.method = []; + } + message.options !== undefined + && (obj.options = message.options ? ServiceOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto(); + message.name = object.name ?? ""; + message.method = object.method?.map((e) => MethodDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? ServiceOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseMethodDescriptorProto(): MethodDescriptorProto { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} + +export const MethodDescriptorProto = { + encode(message: MethodDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.inputType !== "") { + writer.uint32(18).string(message.inputType); + } + if (message.outputType !== "") { + writer.uint32(26).string(message.outputType); + } + if (message.options !== undefined) { + MethodOptions.encode(message.options, writer.uint32(34).fork()).ldelim(); + } + if (message.clientStreaming === true) { + writer.uint32(40).bool(message.clientStreaming); + } + if (message.serverStreaming === true) { + writer.uint32(48).bool(message.serverStreaming); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodDescriptorProto { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.inputType = reader.string(); + break; + case 3: + message.outputType = reader.string(); + break; + case 4: + message.options = MethodOptions.decode(reader, reader.uint32()); + break; + case 5: + message.clientStreaming = reader.bool(); + break; + case 6: + message.serverStreaming = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + inputType: isSet(object.inputType) ? String(object.inputType) : "", + outputType: isSet(object.outputType) ? String(object.outputType) : "", + options: isSet(object.options) ? MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false, + serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false, + }; + }, + + toJSON(message: MethodDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.inputType !== undefined && (obj.inputType = message.inputType); + message.outputType !== undefined && (obj.outputType = message.outputType); + message.options !== undefined + && (obj.options = message.options ? MethodOptions.toJSON(message.options) : undefined); + message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming); + message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming); + return obj; + }, + + fromPartial, I>>(object: I): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto(); + message.name = object.name ?? 
""; + message.inputType = object.inputType ?? ""; + message.outputType = object.outputType ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? MethodOptions.fromPartial(object.options) + : undefined; + message.clientStreaming = object.clientStreaming ?? false; + message.serverStreaming = object.serverStreaming ?? false; + return message; + }, +}; + +function createBaseFileOptions(): FileOptions { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + uninterpretedOption: [], + }; +} + +export const FileOptions = { + encode(message: FileOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.javaPackage !== "") { + writer.uint32(10).string(message.javaPackage); + } + if (message.javaOuterClassname !== "") { + writer.uint32(66).string(message.javaOuterClassname); + } + if (message.javaMultipleFiles === true) { + writer.uint32(80).bool(message.javaMultipleFiles); + } + if (message.javaGenerateEqualsAndHash === true) { + writer.uint32(160).bool(message.javaGenerateEqualsAndHash); + } + if (message.javaStringCheckUtf8 === true) { + writer.uint32(216).bool(message.javaStringCheckUtf8); + } + if (message.optimizeFor !== 1) { + writer.uint32(72).int32(message.optimizeFor); + } + if (message.goPackage !== "") { + writer.uint32(90).string(message.goPackage); + } + if (message.ccGenericServices === true) { + writer.uint32(128).bool(message.ccGenericServices); + } + if (message.javaGenericServices === true) { + writer.uint32(136).bool(message.javaGenericServices); + } + if 
(message.pyGenericServices === true) { + writer.uint32(144).bool(message.pyGenericServices); + } + if (message.phpGenericServices === true) { + writer.uint32(336).bool(message.phpGenericServices); + } + if (message.deprecated === true) { + writer.uint32(184).bool(message.deprecated); + } + if (message.ccEnableArenas === true) { + writer.uint32(248).bool(message.ccEnableArenas); + } + if (message.objcClassPrefix !== "") { + writer.uint32(290).string(message.objcClassPrefix); + } + if (message.csharpNamespace !== "") { + writer.uint32(298).string(message.csharpNamespace); + } + if (message.swiftPrefix !== "") { + writer.uint32(314).string(message.swiftPrefix); + } + if (message.phpClassPrefix !== "") { + writer.uint32(322).string(message.phpClassPrefix); + } + if (message.phpNamespace !== "") { + writer.uint32(330).string(message.phpNamespace); + } + if (message.phpMetadataNamespace !== "") { + writer.uint32(354).string(message.phpMetadataNamespace); + } + if (message.rubyPackage !== "") { + writer.uint32(362).string(message.rubyPackage); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.javaPackage = reader.string(); + break; + case 8: + message.javaOuterClassname = reader.string(); + break; + case 10: + message.javaMultipleFiles = reader.bool(); + break; + case 20: + message.javaGenerateEqualsAndHash = reader.bool(); + break; + case 27: + message.javaStringCheckUtf8 = reader.bool(); + break; + case 9: + message.optimizeFor = reader.int32() as any; + break; + case 11: + message.goPackage = reader.string(); + break; + case 16: + message.ccGenericServices = reader.bool(); + break; + case 17: + message.javaGenericServices = reader.bool(); + break; + case 18: + message.pyGenericServices = reader.bool(); + break; + case 42: + message.phpGenericServices = reader.bool(); + break; + case 23: + message.deprecated = reader.bool(); + break; + case 31: + message.ccEnableArenas = reader.bool(); + break; + case 36: + message.objcClassPrefix = reader.string(); + break; + case 37: + message.csharpNamespace = reader.string(); + break; + case 39: + message.swiftPrefix = reader.string(); + break; + case 40: + message.phpClassPrefix = reader.string(); + break; + case 41: + message.phpNamespace = reader.string(); + break; + case 44: + message.phpMetadataNamespace = reader.string(); + break; + case 45: + message.rubyPackage = reader.string(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileOptions { + return { + javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "", + javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) ? 
Boolean(object.javaMultipleFiles) : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false, + optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, + goPackage: isSet(object.goPackage) ? String(object.goPackage) : "", + ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false, + javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false, + pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false, + phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false, + objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "", + csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "", + swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "", + phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "", + phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "", + rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "", + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FileOptions): unknown { + const obj: any = {}; + message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage); + message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname); + message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles); + message.javaGenerateEqualsAndHash !== undefined + && (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash); + message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8); + message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor)); + message.goPackage !== undefined && (obj.goPackage = message.goPackage); + message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices); + message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices); + message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices); + message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas); + message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix); + message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace); + message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix); + message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix); + message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace); + message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace); + message.rubyPackage !== undefined 
&& (obj.rubyPackage = message.rubyPackage); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileOptions { + const message = createBaseFileOptions(); + message.javaPackage = object.javaPackage ?? ""; + message.javaOuterClassname = object.javaOuterClassname ?? ""; + message.javaMultipleFiles = object.javaMultipleFiles ?? false; + message.javaGenerateEqualsAndHash = object.javaGenerateEqualsAndHash ?? false; + message.javaStringCheckUtf8 = object.javaStringCheckUtf8 ?? false; + message.optimizeFor = object.optimizeFor ?? 1; + message.goPackage = object.goPackage ?? ""; + message.ccGenericServices = object.ccGenericServices ?? false; + message.javaGenericServices = object.javaGenericServices ?? false; + message.pyGenericServices = object.pyGenericServices ?? false; + message.phpGenericServices = object.phpGenericServices ?? false; + message.deprecated = object.deprecated ?? false; + message.ccEnableArenas = object.ccEnableArenas ?? false; + message.objcClassPrefix = object.objcClassPrefix ?? ""; + message.csharpNamespace = object.csharpNamespace ?? ""; + message.swiftPrefix = object.swiftPrefix ?? ""; + message.phpClassPrefix = object.phpClassPrefix ?? ""; + message.phpNamespace = object.phpNamespace ?? ""; + message.phpMetadataNamespace = object.phpMetadataNamespace ?? ""; + message.rubyPackage = object.rubyPackage ?? 
""; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMessageOptions(): MessageOptions { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + uninterpretedOption: [], + }; +} + +export const MessageOptions = { + encode(message: MessageOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.messageSetWireFormat === true) { + writer.uint32(8).bool(message.messageSetWireFormat); + } + if (message.noStandardDescriptorAccessor === true) { + writer.uint32(16).bool(message.noStandardDescriptorAccessor); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.mapEntry === true) { + writer.uint32(56).bool(message.mapEntry); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MessageOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMessageOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.messageSetWireFormat = reader.bool(); + break; + case 2: + message.noStandardDescriptorAccessor = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 7: + message.mapEntry = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MessageOptions { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) ? 
Boolean(object.messageSetWireFormat) : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MessageOptions): unknown { + const obj: any = {}; + message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat); + message.noStandardDescriptorAccessor !== undefined + && (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MessageOptions { + const message = createBaseMessageOptions(); + message.messageSetWireFormat = object.messageSetWireFormat ?? false; + message.noStandardDescriptorAccessor = object.noStandardDescriptorAccessor ?? false; + message.deprecated = object.deprecated ?? false; + message.mapEntry = object.mapEntry ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldOptions(): FieldOptions { + return { ctype: 0, packed: false, jstype: 0, lazy: false, deprecated: false, weak: false, uninterpretedOption: [] }; +} + +export const FieldOptions = { + encode(message: FieldOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ctype !== 0) { + writer.uint32(8).int32(message.ctype); + } + if (message.packed === true) { + writer.uint32(16).bool(message.packed); + } + if (message.jstype !== 0) { + writer.uint32(48).int32(message.jstype); + } + if (message.lazy === true) { + writer.uint32(40).bool(message.lazy); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.weak === true) { + writer.uint32(80).bool(message.weak); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.ctype = reader.int32() as any; + break; + case 2: + message.packed = reader.bool(); + break; + case 6: + message.jstype = reader.int32() as any; + break; + case 5: + message.lazy = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 10: + message.weak = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldOptions { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? Boolean(object.lazy) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? Boolean(object.weak) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FieldOptions): unknown { + const obj: any = {}; + message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype)); + message.packed !== undefined && (obj.packed = message.packed); + message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype)); + message.lazy !== undefined && (obj.lazy = message.lazy); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.weak !== undefined && (obj.weak = message.weak); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FieldOptions { + const message = createBaseFieldOptions(); + message.ctype = object.ctype ?? 0; + message.packed = object.packed ?? false; + message.jstype = object.jstype ?? 0; + message.lazy = object.lazy ?? false; + message.deprecated = object.deprecated ?? false; + message.weak = object.weak ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseOneofOptions(): OneofOptions { + return { uninterpretedOption: [] }; +} + +export const OneofOptions = { + encode(message: OneofOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: OneofOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): OneofOptions { + const message = createBaseOneofOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumOptions(): EnumOptions { + return { allowAlias: false, deprecated: false, uninterpretedOption: [] }; +} + +export const EnumOptions = { + encode(message: EnumOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.allowAlias === true) { + writer.uint32(16).bool(message.allowAlias); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.allowAlias = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumOptions { + return { + allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumOptions): unknown { + const obj: any = {}; + message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumOptions { + const message = createBaseEnumOptions(); + message.allowAlias = object.allowAlias ?? false; + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumValueOptions(): EnumValueOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const EnumValueOptions = { + encode(message: EnumValueOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(8).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumValueOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumValueOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumValueOptions { + const message = createBaseEnumValueOptions(); + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseServiceOptions(): ServiceOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const ServiceOptions = { + encode(message: ServiceOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceOptions { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ServiceOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ServiceOptions { + const message = createBaseServiceOptions(); + message.deprecated = object.deprecated ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMethodOptions(): MethodOptions { + return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] }; +} + +export const MethodOptions = { + encode(message: MethodOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + if (message.idempotencyLevel !== 0) { + writer.uint32(272).int32(message.idempotencyLevel); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 34: + message.idempotencyLevel = reader.int32() as any; + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : 0, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MethodOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.idempotencyLevel !== undefined + && (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel)); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MethodOptions { + const message = createBaseMethodOptions(); + message.deprecated = object.deprecated ?? false; + message.idempotencyLevel = object.idempotencyLevel ?? 0; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseUninterpretedOption(): UninterpretedOption { + return { + name: [], + identifierValue: "", + positiveIntValue: 0, + negativeIntValue: 0, + doubleValue: 0, + stringValue: new Uint8Array(), + aggregateValue: "", + }; +} + +export const UninterpretedOption = { + encode(message: UninterpretedOption, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.name) { + UninterpretedOption_NamePart.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.identifierValue !== "") { + writer.uint32(26).string(message.identifierValue); + } + if (message.positiveIntValue !== 0) { + writer.uint32(32).uint64(message.positiveIntValue); + } + if (message.negativeIntValue !== 0) { + writer.uint32(40).int64(message.negativeIntValue); + } + if (message.doubleValue !== 0) { + writer.uint32(49).double(message.doubleValue); + } + if (message.stringValue.length !== 0) { + writer.uint32(58).bytes(message.stringValue); + } + if (message.aggregateValue !== "") { + writer.uint32(66).string(message.aggregateValue); + } + 
return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.name.push(UninterpretedOption_NamePart.decode(reader, reader.uint32())); + break; + case 3: + message.identifierValue = reader.string(); + break; + case 4: + message.positiveIntValue = longToNumber(reader.uint64() as Long); + break; + case 5: + message.negativeIntValue = longToNumber(reader.int64() as Long); + break; + case 6: + message.doubleValue = reader.double(); + break; + case 7: + message.stringValue = reader.bytes(); + break; + case 8: + message.aggregateValue = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption { + return { + name: Array.isArray(object?.name) ? object.name.map((e: any) => UninterpretedOption_NamePart.fromJSON(e)) : [], + identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "", + positiveIntValue: isSet(object.positiveIntValue) ? Number(object.positiveIntValue) : 0, + negativeIntValue: isSet(object.negativeIntValue) ? Number(object.negativeIntValue) : 0, + doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) ? bytesFromBase64(object.stringValue) : new Uint8Array(), + aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "", + }; + }, + + toJSON(message: UninterpretedOption): unknown { + const obj: any = {}; + if (message.name) { + obj.name = message.name.map((e) => e ? 
UninterpretedOption_NamePart.toJSON(e) : undefined); + } else { + obj.name = []; + } + message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue); + message.positiveIntValue !== undefined && (obj.positiveIntValue = Math.round(message.positiveIntValue)); + message.negativeIntValue !== undefined && (obj.negativeIntValue = Math.round(message.negativeIntValue)); + message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue); + message.stringValue !== undefined + && (obj.stringValue = base64FromBytes( + message.stringValue !== undefined ? message.stringValue : new Uint8Array(), + )); + message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption { + const message = createBaseUninterpretedOption(); + message.name = object.name?.map((e) => UninterpretedOption_NamePart.fromPartial(e)) || []; + message.identifierValue = object.identifierValue ?? ""; + message.positiveIntValue = object.positiveIntValue ?? 0; + message.negativeIntValue = object.negativeIntValue ?? 0; + message.doubleValue = object.doubleValue ?? 0; + message.stringValue = object.stringValue ?? new Uint8Array(); + message.aggregateValue = object.aggregateValue ?? ""; + return message; + }, +}; + +function createBaseUninterpretedOption_NamePart(): UninterpretedOption_NamePart { + return { namePart: "", isExtension: false }; +} + +export const UninterpretedOption_NamePart = { + encode(message: UninterpretedOption_NamePart, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.namePart !== "") { + writer.uint32(10).string(message.namePart); + } + if (message.isExtension === true) { + writer.uint32(16).bool(message.isExtension); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption_NamePart { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseUninterpretedOption_NamePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.namePart = reader.string(); + break; + case 2: + message.isExtension = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption_NamePart { + return { + namePart: isSet(object.namePart) ? String(object.namePart) : "", + isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false, + }; + }, + + toJSON(message: UninterpretedOption_NamePart): unknown { + const obj: any = {}; + message.namePart !== undefined && (obj.namePart = message.namePart); + message.isExtension !== undefined && (obj.isExtension = message.isExtension); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart(); + message.namePart = object.namePart ?? ""; + message.isExtension = object.isExtension ?? false; + return message; + }, +}; + +function createBaseSourceCodeInfo(): SourceCodeInfo { + return { location: [] }; +} + +export const SourceCodeInfo = { + encode(message: SourceCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.location) { + SourceCodeInfo_Location.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.location.push(SourceCodeInfo_Location.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo { + return { + location: Array.isArray(object?.location) + ? object.location.map((e: any) => SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo): unknown { + const obj: any = {}; + if (message.location) { + obj.location = message.location.map((e) => e ? SourceCodeInfo_Location.toJSON(e) : undefined); + } else { + obj.location = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo { + const message = createBaseSourceCodeInfo(); + message.location = object.location?.map((e) => SourceCodeInfo_Location.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSourceCodeInfo_Location(): SourceCodeInfo_Location { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} + +export const SourceCodeInfo_Location = { + encode(message: SourceCodeInfo_Location, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(18).fork(); + for (const v of message.span) { + writer.int32(v); + } + writer.ldelim(); + if (message.leadingComments !== "") { + writer.uint32(26).string(message.leadingComments); + } + if (message.trailingComments !== "") { + writer.uint32(34).string(message.trailingComments); + } + for (const v of message.leadingDetachedComments) { + writer.uint32(50).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo_Location { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo_Location(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.span.push(reader.int32()); + } + } else { + message.span.push(reader.int32()); + } + break; + case 3: + message.leadingComments = reader.string(); + break; + case 4: + message.trailingComments = reader.string(); + break; + case 6: + message.leadingDetachedComments.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo_Location { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + span: Array.isArray(object?.span) ? object.span.map((e: any) => Number(e)) : [], + leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "", + trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "", + leadingDetachedComments: Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e: any) => String(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo_Location): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + if (message.span) { + obj.span = message.span.map((e) => Math.round(e)); + } else { + obj.span = []; + } + message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments); + message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments); + if (message.leadingDetachedComments) { + obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e); + } else { + obj.leadingDetachedComments = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location(); + message.path = object.path?.map((e) => e) || []; + message.span = object.span?.map((e) => e) || []; + message.leadingComments = object.leadingComments ?? ""; + message.trailingComments = object.trailingComments ?? ""; + message.leadingDetachedComments = object.leadingDetachedComments?.map((e) => e) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo(): GeneratedCodeInfo { + return { annotation: [] }; +} + +export const GeneratedCodeInfo = { + encode(message: GeneratedCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.annotation) { + GeneratedCodeInfo_Annotation.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.annotation.push(GeneratedCodeInfo_Annotation.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo { + return { + annotation: Array.isArray(object?.annotation) + ? object.annotation.map((e: any) => GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GeneratedCodeInfo): unknown { + const obj: any = {}; + if (message.annotation) { + obj.annotation = message.annotation.map((e) => e ? GeneratedCodeInfo_Annotation.toJSON(e) : undefined); + } else { + obj.annotation = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo(); + message.annotation = object.annotation?.map((e) => GeneratedCodeInfo_Annotation.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo_Annotation(): GeneratedCodeInfo_Annotation { + return { path: [], sourceFile: "", begin: 0, end: 0 }; +} + +export const GeneratedCodeInfo_Annotation = { + encode(message: GeneratedCodeInfo_Annotation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + if (message.sourceFile !== "") { + writer.uint32(18).string(message.sourceFile); + } + if (message.begin !== 0) { + writer.uint32(24).int32(message.begin); + } + if (message.end !== 0) { + writer.uint32(32).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo_Annotation { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo_Annotation(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + message.sourceFile = reader.string(); + break; + case 3: + message.begin = reader.int32(); + break; + case 4: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo_Annotation { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "", + begin: isSet(object.begin) ? Number(object.begin) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + }; + }, + + toJSON(message: GeneratedCodeInfo_Annotation): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile); + message.begin !== undefined && (obj.begin = Math.round(message.begin)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation(); + message.path = object.path?.map((e) => e) || []; + message.sourceFile = object.sourceFile ?? ""; + message.begin = object.begin ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.base.tendermint.v1beta1/index.ts b/ts-client/cosmos.base.tendermint.v1beta1/index.ts new file mode 100755 index 000000000..c9dfa159e --- /dev/null +++ b/ts-client/cosmos.base.tendermint.v1beta1/index.ts @@ -0,0 +1,6 @@ +import Module from './module'; +import { txClient, queryClient, registry } from './module'; +import { msgTypes } from './registry'; + +export * from "./types"; +export { Module, msgTypes, txClient, queryClient, registry }; diff --git a/ts-client/cosmos.base.tendermint.v1beta1/module.ts b/ts-client/cosmos.base.tendermint.v1beta1/module.ts new file mode 100755 index 000000000..38218fe00 --- /dev/null +++ b/ts-client/cosmos.base.tendermint.v1beta1/module.ts @@ -0,0 +1,110 @@ +// Generated by Ignite ignite.com/cli + +import { StdFee } from "@cosmjs/launchpad"; +import { SigningStargateClient, DeliverTxResponse } from "@cosmjs/stargate"; +import { EncodeObject, GeneratedType, OfflineSigner, Registry } from "@cosmjs/proto-signing"; +import { msgTypes } from './registry'; +import { IgniteClient } from "../client" +import { MissingWalletError } from "../helpers" +import { Api } from "./rest"; + +import { Validator as typeValidator} from "./types" +import { VersionInfo as typeVersionInfo} from "./types" +import { Module as typeModule} from "./types" +import { ProofOp as typeProofOp} from "./types" +import { ProofOps as typeProofOps} from "./types" +import { Block as typeBlock} from "./types" +import { Header as typeHeader} from "./types" + +export { }; + + + +export const registry = new 
Registry(msgTypes); + +type Field = { + name: string; + type: unknown; +} +function getStructure(template) { + const structure: {fields: Field[]} = { fields: [] } + for (let [key, value] of Object.entries(template)) { + let field = { name: key, type: typeof value } + structure.fields.push(field) + } + return structure +} +const defaultFee = { + amount: [], + gas: "200000", +}; + +interface TxClientOptions { + addr: string + prefix: string + signer?: OfflineSigner +} + +export const txClient = ({ signer, prefix, addr }: TxClientOptions = { addr: "http://localhost:26657", prefix: "cosmos" }) => { + + return { + + + } +}; + +interface QueryClientOptions { + addr: string +} + +export const queryClient = ({ addr: addr }: QueryClientOptions = { addr: "http://localhost:1317" }) => { + return new Api({ baseURL: addr }); +}; + +class SDKModule { + public query: ReturnType; + public tx: ReturnType; + public structure: Record; + public registry: Array<[string, GeneratedType]> = []; + + constructor(client: IgniteClient) { + + this.query = queryClient({ addr: client.env.apiURL }); + this.updateTX(client); + this.structure = { + Validator: getStructure(typeValidator.fromPartial({})), + VersionInfo: getStructure(typeVersionInfo.fromPartial({})), + Module: getStructure(typeModule.fromPartial({})), + ProofOp: getStructure(typeProofOp.fromPartial({})), + ProofOps: getStructure(typeProofOps.fromPartial({})), + Block: getStructure(typeBlock.fromPartial({})), + Header: getStructure(typeHeader.fromPartial({})), + + }; + client.on('signer-changed',(signer) => { + this.updateTX(client); + }) + } + updateTX(client: IgniteClient) { + const methods = txClient({ + signer: client.signer, + addr: client.env.rpcURL, + prefix: client.env.prefix ?? 
"cosmos", + }) + + this.tx = methods; + for (let m in methods) { + this.tx[m] = methods[m].bind(this.tx); + } + } +}; + +const Module = (test: IgniteClient) => { + return { + module: { + CosmosBaseTendermintV1Beta1: new SDKModule(test) + }, + registry: msgTypes + } +} +export default Module; \ No newline at end of file diff --git a/ts-client/cosmos.base.tendermint.v1beta1/registry.ts b/ts-client/cosmos.base.tendermint.v1beta1/registry.ts new file mode 100755 index 000000000..26157e4ff --- /dev/null +++ b/ts-client/cosmos.base.tendermint.v1beta1/registry.ts @@ -0,0 +1,7 @@ +import { GeneratedType } from "@cosmjs/proto-signing"; + +const msgTypes: Array<[string, GeneratedType]> = [ + +]; + +export { msgTypes } \ No newline at end of file diff --git a/ts-client/cosmos.base.tendermint.v1beta1/rest.ts b/ts-client/cosmos.base.tendermint.v1beta1/rest.ts new file mode 100644 index 000000000..7e209ed2b --- /dev/null +++ b/ts-client/cosmos.base.tendermint.v1beta1/rest.ts @@ -0,0 +1,1138 @@ +/* eslint-disable */ +/* tslint:disable */ +/* + * --------------------------------------------------------------- + * ## THIS FILE WAS GENERATED VIA SWAGGER-TYPESCRIPT-API ## + * ## ## + * ## AUTHOR: acacode ## + * ## SOURCE: https://github.com/acacode/swagger-typescript-api ## + * --------------------------------------------------------------- + */ + +export interface CryptoPublicKey { + /** @format byte */ + ed25519?: string; + + /** @format byte */ + secp256k1?: string; +} + +export interface P2PDefaultNodeInfo { + protocol_version?: P2PProtocolVersion; + default_node_id?: string; + listen_addr?: string; + network?: string; + version?: string; + + /** @format byte */ + channels?: string; + moniker?: string; + other?: P2PDefaultNodeInfoOther; +} + +export interface P2PDefaultNodeInfoOther { + tx_index?: string; + rpc_address?: string; +} + +export interface P2PProtocolVersion { + /** @format uint64 */ + p2p?: string; + + /** @format uint64 */ + block?: string; + + /** @format uint64 */ 
+ app?: string; +} + +/** +* `Any` contains an arbitrary serialized protocol buffer message along with a +URL that describes the type of the serialized message. + +Protobuf library provides support to pack/unpack Any values in the form +of utility functions or additional generated methods of the Any type. + +Example 1: Pack and unpack a message in C++. + + Foo foo = ...; + Any any; + any.PackFrom(foo); + ... + if (any.UnpackTo(&foo)) { + ... + } + +Example 2: Pack and unpack a message in Java. + + Foo foo = ...; + Any any = Any.pack(foo); + ... + if (any.is(Foo.class)) { + foo = any.unpack(Foo.class); + } + + Example 3: Pack and unpack a message in Python. + + foo = Foo(...) + any = Any() + any.Pack(foo) + ... + if any.Is(Foo.DESCRIPTOR): + any.Unpack(foo) + ... + + Example 4: Pack and unpack a message in Go + + foo := &pb.Foo{...} + any, err := anypb.New(foo) + if err != nil { + ... + } + ... + foo := &pb.Foo{} + if err := any.UnmarshalTo(foo); err != nil { + ... + } + +The pack methods provided by protobuf library will by default use +'type.googleapis.com/full.type.name' as the type URL and the unpack +methods only use the fully qualified type name after the last '/' +in the type URL, for example "foo.bar.com/x/y.z" will yield type +name "y.z". + + +JSON +==== +The JSON representation of an `Any` value uses the regular +representation of the deserialized, embedded message, with an +additional field `@type` which contains the type URL. Example: + + package google.profile; + message Person { + string first_name = 1; + string last_name = 2; + } + + { + "@type": "type.googleapis.com/google.profile.Person", + "firstName": , + "lastName": + } + +If the embedded message type is well-known and has a custom JSON +representation, that representation will be embedded adding a field +`value` which holds the custom JSON in addition to the `@type` +field. 
Example (for message [google.protobuf.Duration][]): + + { + "@type": "type.googleapis.com/google.protobuf.Duration", + "value": "1.212s" + } +*/ +export interface ProtobufAny { + /** + * A URL/resource name that uniquely identifies the type of the serialized + * protocol buffer message. This string must contain at least + * one "/" character. The last segment of the URL's path must represent + * the fully qualified name of the type (as in + * `path/google.protobuf.Duration`). The name should be in a canonical form + * (e.g., leading "." is not accepted). + * + * In practice, teams usually precompile into the binary all types that they + * expect it to use in the context of Any. However, for URLs which use the + * scheme `http`, `https`, or no scheme, one can optionally set up a type + * server that maps type URLs to message definitions as follows: + * * If no scheme is provided, `https` is assumed. + * * An HTTP GET on the URL must yield a [google.protobuf.Type][] + * value in binary format, or produce an error. + * * Applications are allowed to cache lookup results based on the + * URL, or have them precompiled into a binary to avoid any + * lookup. Therefore, binary compatibility needs to be preserved + * on changes to types. (Use versioned type names to manage + * breaking changes.) + * Note: this functionality is not currently available in the official + * protobuf release, and it is not used for type URLs beginning with + * type.googleapis.com. + * Schemes other than `http`, `https` (or the empty scheme) might be + * used with implementation specific semantics. + */ + "@type"?: string; +} + +export interface RpcStatus { + /** @format int32 */ + code?: number; + message?: string; + details?: ProtobufAny[]; +} + +export interface TenderminttypesBlock { + /** Header defines the structure of a block header. 
*/ + header?: TenderminttypesHeader; + data?: TypesData; + evidence?: TypesEvidenceList; + + /** Commit contains the evidence that a block was committed by a set of validators. */ + last_commit?: TypesCommit; +} + +/** + * Header defines the structure of a block header. + */ +export interface TenderminttypesHeader { + /** + * basic block info + * Consensus captures the consensus rules for processing a block in the blockchain, + * including all blockchain data structures and the rules of the application's + * state transition machine. + */ + version?: VersionConsensus; + chain_id?: string; + + /** @format int64 */ + height?: string; + + /** @format date-time */ + time?: string; + + /** prev block info */ + last_block_id?: TypesBlockID; + + /** + * hashes of block data + * commit from validators from the last block + * @format byte + */ + last_commit_hash?: string; + + /** + * transactions + * @format byte + */ + data_hash?: string; + + /** + * hashes from the app output from the prev block + * validators for the current block + * @format byte + */ + validators_hash?: string; + + /** + * validators for the next block + * @format byte + */ + next_validators_hash?: string; + + /** + * consensus params for current block + * @format byte + */ + consensus_hash?: string; + + /** + * state after txs from the previous block + * @format byte + */ + app_hash?: string; + + /** + * root hash of all results from the txs from the previous block + * @format byte + */ + last_results_hash?: string; + + /** + * consensus info + * evidence included in the block + * @format byte + */ + evidence_hash?: string; + + /** + * original proposer of the block + * @format byte + */ + proposer_address?: string; +} + +export interface TenderminttypesValidator { + /** @format byte */ + address?: string; + pub_key?: CryptoPublicKey; + + /** @format int64 */ + voting_power?: string; + + /** @format int64 */ + proposer_priority?: string; +} + +/** +* Block is tendermint type Block, with the Header 
proposer address +field converted to bech32 string. +*/ +export interface Tendermintv1Beta1Block { + /** Header defines the structure of a Tendermint block header. */ + header?: Tendermintv1Beta1Header; + data?: TypesData; + evidence?: TypesEvidenceList; + + /** Commit contains the evidence that a block was committed by a set of validators. */ + last_commit?: TypesCommit; +} + +/** + * Header defines the structure of a Tendermint block header. + */ +export interface Tendermintv1Beta1Header { + /** + * basic block info + * Consensus captures the consensus rules for processing a block in the blockchain, + * including all blockchain data structures and the rules of the application's + * state transition machine. + */ + version?: VersionConsensus; + chain_id?: string; + + /** @format int64 */ + height?: string; + + /** @format date-time */ + time?: string; + + /** prev block info */ + last_block_id?: TypesBlockID; + + /** + * hashes of block data + * commit from validators from the last block + * @format byte + */ + last_commit_hash?: string; + + /** + * transactions + * @format byte + */ + data_hash?: string; + + /** + * hashes from the app output from the prev block + * validators for the current block + * @format byte + */ + validators_hash?: string; + + /** + * validators for the next block + * @format byte + */ + next_validators_hash?: string; + + /** + * consensus params for current block + * @format byte + */ + consensus_hash?: string; + + /** + * state after txs from the previous block + * @format byte + */ + app_hash?: string; + + /** + * root hash of all results from the txs from the previous block + * @format byte + */ + last_results_hash?: string; + + /** + * consensus info + * evidence included in the block + * @format byte + */ + evidence_hash?: string; + + /** + * proposer_address is the original block proposer address, formatted as a Bech32 string. 
+ * In Tendermint, this type is `bytes`, but in the SDK, we convert it to a Bech32 string + * for better UX. + * + * original proposer of the block + */ + proposer_address?: string; +} + +/** +* ProofOp defines an operation used for calculating Merkle root. The data could +be arbitrary format, providing necessary data for example neighbouring node +hash. + +Note: This type is a duplicate of the ProofOp proto type defined in Tendermint. +*/ +export interface Tendermintv1Beta1ProofOp { + type?: string; + + /** @format byte */ + key?: string; + + /** @format byte */ + data?: string; +} + +/** +* ProofOps is Merkle proof defined by the list of ProofOps. + +Note: This type is a duplicate of the ProofOps proto type defined in Tendermint. +*/ +export interface Tendermintv1Beta1ProofOps { + ops?: Tendermintv1Beta1ProofOp[]; +} + +/** + * Validator is the type for the validator-set. + */ +export interface Tendermintv1Beta1Validator { + address?: string; + + /** + * `Any` contains an arbitrary serialized protocol buffer message along with a + * URL that describes the type of the serialized message. + * + * Protobuf library provides support to pack/unpack Any values in the form + * of utility functions or additional generated methods of the Any type. + * Example 1: Pack and unpack a message in C++. + * Foo foo = ...; + * Any any; + * any.PackFrom(foo); + * ... + * if (any.UnpackTo(&foo)) { + * ... + * } + * Example 2: Pack and unpack a message in Java. + * Any any = Any.pack(foo); + * if (any.is(Foo.class)) { + * foo = any.unpack(Foo.class); + * Example 3: Pack and unpack a message in Python. + * foo = Foo(...) + * any = Any() + * any.Pack(foo) + * if any.Is(Foo.DESCRIPTOR): + * any.Unpack(foo) + * Example 4: Pack and unpack a message in Go + * foo := &pb.Foo{...} + * any, err := anypb.New(foo) + * if err != nil { + * ... + * } + * ... 
+ * foo := &pb.Foo{} + * if err := any.UnmarshalTo(foo); err != nil { + * The pack methods provided by protobuf library will by default use + * 'type.googleapis.com/full.type.name' as the type URL and the unpack + * methods only use the fully qualified type name after the last '/' + * in the type URL, for example "foo.bar.com/x/y.z" will yield type + * name "y.z". + * JSON + * ==== + * The JSON representation of an `Any` value uses the regular + * representation of the deserialized, embedded message, with an + * additional field `@type` which contains the type URL. Example: + * package google.profile; + * message Person { + * string first_name = 1; + * string last_name = 2; + * { + * "@type": "type.googleapis.com/google.profile.Person", + * "firstName": , + * "lastName": + * If the embedded message type is well-known and has a custom JSON + * representation, that representation will be embedded adding a field + * `value` which holds the custom JSON in addition to the `@type` + * field. Example (for message [google.protobuf.Duration][]): + * "@type": "type.googleapis.com/google.protobuf.Duration", + * "value": "1.212s" + */ + pub_key?: ProtobufAny; + + /** @format int64 */ + voting_power?: string; + + /** @format int64 */ + proposer_priority?: string; +} + +export interface TypesBlockID { + /** @format byte */ + hash?: string; + part_set_header?: TypesPartSetHeader; +} + +export enum TypesBlockIDFlag { + BLOCK_ID_FLAG_UNKNOWN = "BLOCK_ID_FLAG_UNKNOWN", + BLOCK_ID_FLAG_ABSENT = "BLOCK_ID_FLAG_ABSENT", + BLOCK_ID_FLAG_COMMIT = "BLOCK_ID_FLAG_COMMIT", + BLOCK_ID_FLAG_NIL = "BLOCK_ID_FLAG_NIL", +} + +/** + * Commit contains the evidence that a block was committed by a set of validators. + */ +export interface TypesCommit { + /** @format int64 */ + height?: string; + + /** @format int32 */ + round?: number; + block_id?: TypesBlockID; + signatures?: TypesCommitSig[]; +} + +/** + * CommitSig is a part of the Vote included in a Commit. 
+ */ +export interface TypesCommitSig { + block_id_flag?: TypesBlockIDFlag; + + /** @format byte */ + validator_address?: string; + + /** @format date-time */ + timestamp?: string; + + /** @format byte */ + signature?: string; +} + +export interface TypesData { + /** + * Txs that will be applied by state @ block.Height+1. + * NOTE: not all txs here are valid. We're just agreeing on the order first. + * This means that block.AppHash does not include these txs. + */ + txs?: string[]; +} + +/** + * DuplicateVoteEvidence contains evidence of a validator signed two conflicting votes. + */ +export interface TypesDuplicateVoteEvidence { + /** + * Vote represents a prevote, precommit, or commit vote from validators for + * consensus. + */ + vote_a?: TypesVote; + + /** + * Vote represents a prevote, precommit, or commit vote from validators for + * consensus. + */ + vote_b?: TypesVote; + + /** @format int64 */ + total_voting_power?: string; + + /** @format int64 */ + validator_power?: string; + + /** @format date-time */ + timestamp?: string; +} + +export interface TypesEvidence { + /** DuplicateVoteEvidence contains evidence of a validator signed two conflicting votes. */ + duplicate_vote_evidence?: TypesDuplicateVoteEvidence; + + /** LightClientAttackEvidence contains evidence of a set of validators attempting to mislead a light client. */ + light_client_attack_evidence?: TypesLightClientAttackEvidence; +} + +export interface TypesEvidenceList { + evidence?: TypesEvidence[]; +} + +export interface TypesLightBlock { + signed_header?: TypesSignedHeader; + validator_set?: TypesValidatorSet; +} + +/** + * LightClientAttackEvidence contains evidence of a set of validators attempting to mislead a light client. 
+ */ +export interface TypesLightClientAttackEvidence { + conflicting_block?: TypesLightBlock; + + /** @format int64 */ + common_height?: string; + byzantine_validators?: TenderminttypesValidator[]; + + /** @format int64 */ + total_voting_power?: string; + + /** @format date-time */ + timestamp?: string; +} + +export interface TypesPartSetHeader { + /** @format int64 */ + total?: number; + + /** @format byte */ + hash?: string; +} + +export interface TypesSignedHeader { + /** Header defines the structure of a block header. */ + header?: TenderminttypesHeader; + + /** Commit contains the evidence that a block was committed by a set of validators. */ + commit?: TypesCommit; +} + +/** +* SignedMsgType is a type of signed message in the consensus. + + - SIGNED_MSG_TYPE_PREVOTE: Votes + - SIGNED_MSG_TYPE_PROPOSAL: Proposals +*/ +export enum TypesSignedMsgType { + SIGNED_MSG_TYPE_UNKNOWN = "SIGNED_MSG_TYPE_UNKNOWN", + SIGNED_MSG_TYPE_PREVOTE = "SIGNED_MSG_TYPE_PREVOTE", + SIGNED_MSG_TYPE_PRECOMMIT = "SIGNED_MSG_TYPE_PRECOMMIT", + SIGNED_MSG_TYPE_PROPOSAL = "SIGNED_MSG_TYPE_PROPOSAL", +} + +export interface TypesValidatorSet { + validators?: TenderminttypesValidator[]; + proposer?: TenderminttypesValidator; + + /** @format int64 */ + total_voting_power?: string; +} + +/** +* Vote represents a prevote, precommit, or commit vote from validators for +consensus. +*/ +export interface TypesVote { + /** + * SignedMsgType is a type of signed message in the consensus. + * + * - SIGNED_MSG_TYPE_PREVOTE: Votes + * - SIGNED_MSG_TYPE_PROPOSAL: Proposals + */ + type?: TypesSignedMsgType; + + /** @format int64 */ + height?: string; + + /** @format int32 */ + round?: number; + + /** zero if vote is nil. 
*/ + block_id?: TypesBlockID; + + /** @format date-time */ + timestamp?: string; + + /** @format byte */ + validator_address?: string; + + /** @format int32 */ + validator_index?: number; + + /** @format byte */ + signature?: string; +} + +/** +* ABCIQueryResponse defines the response structure for the ABCIQuery gRPC query. + +Note: This type is a duplicate of the ResponseQuery proto type defined in +Tendermint. +*/ +export interface V1Beta1ABCIQueryResponse { + /** @format int64 */ + code?: number; + + /** nondeterministic */ + log?: string; + + /** nondeterministic */ + info?: string; + + /** @format int64 */ + index?: string; + + /** @format byte */ + key?: string; + + /** @format byte */ + value?: string; + + /** + * ProofOps is Merkle proof defined by the list of ProofOps. + * + * Note: This type is a duplicate of the ProofOps proto type defined in Tendermint. + */ + proof_ops?: Tendermintv1Beta1ProofOps; + + /** @format int64 */ + height?: string; + codespace?: string; +} + +/** + * GetBlockByHeightResponse is the response type for the Query/GetBlockByHeight RPC method. + */ +export interface V1Beta1GetBlockByHeightResponse { + block_id?: TypesBlockID; + + /** Deprecated: please use `sdk_block` instead */ + block?: TenderminttypesBlock; + + /** + * Since: cosmos-sdk 0.47 + * Block is tendermint type Block, with the Header proposer address + * field converted to bech32 string. + */ + sdk_block?: Tendermintv1Beta1Block; +} + +/** + * GetLatestBlockResponse is the response type for the Query/GetLatestBlock RPC method. + */ +export interface V1Beta1GetLatestBlockResponse { + block_id?: TypesBlockID; + + /** Deprecated: please use `sdk_block` instead */ + block?: TenderminttypesBlock; + + /** + * Since: cosmos-sdk 0.47 + * Block is tendermint type Block, with the Header proposer address + * field converted to bech32 string. 
+ */ + sdk_block?: Tendermintv1Beta1Block; +} + +/** + * GetLatestValidatorSetResponse is the response type for the Query/GetValidatorSetByHeight RPC method. + */ +export interface V1Beta1GetLatestValidatorSetResponse { + /** @format int64 */ + block_height?: string; + validators?: Tendermintv1Beta1Validator[]; + + /** pagination defines an pagination for the response. */ + pagination?: V1Beta1PageResponse; +} + +/** + * GetNodeInfoResponse is the response type for the Query/GetNodeInfo RPC method. + */ +export interface V1Beta1GetNodeInfoResponse { + default_node_info?: P2PDefaultNodeInfo; + + /** VersionInfo is the type for the GetNodeInfoResponse message. */ + application_version?: V1Beta1VersionInfo; +} + +/** + * GetSyncingResponse is the response type for the Query/GetSyncing RPC method. + */ +export interface V1Beta1GetSyncingResponse { + syncing?: boolean; +} + +/** + * GetValidatorSetByHeightResponse is the response type for the Query/GetValidatorSetByHeight RPC method. + */ +export interface V1Beta1GetValidatorSetByHeightResponse { + /** @format int64 */ + block_height?: string; + validators?: Tendermintv1Beta1Validator[]; + + /** pagination defines an pagination for the response. */ + pagination?: V1Beta1PageResponse; +} + +export interface V1Beta1Module { + /** module path */ + path?: string; + + /** module version */ + version?: string; + + /** checksum */ + sum?: string; +} + +/** +* message SomeRequest { + Foo some_parameter = 1; + PageRequest pagination = 2; + } +*/ +export interface V1Beta1PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. + * @format byte + */ + key?: string; + + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. 
+ * @format uint64 + */ + offset?: string; + + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. + * @format uint64 + */ + limit?: string; + + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. + */ + count_total?: boolean; + + /** + * reverse is set to true if results are to be returned in the descending order. + * + * Since: cosmos-sdk 0.43 + */ + reverse?: boolean; +} + +/** +* PageResponse is to be embedded in gRPC response messages where the +corresponding request message has used PageRequest. + + message SomeResponse { + repeated Bar results = 1; + PageResponse page = 2; + } +*/ +export interface V1Beta1PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. + * @format byte + */ + next_key?: string; + + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + * @format uint64 + */ + total?: string; +} + +/** + * VersionInfo is the type for the GetNodeInfoResponse message. + */ +export interface V1Beta1VersionInfo { + name?: string; + app_name?: string; + version?: string; + git_commit?: string; + build_tags?: string; + go_version?: string; + build_deps?: V1Beta1Module[]; + + /** Since: cosmos-sdk 0.43 */ + cosmos_sdk_version?: string; +} + +/** +* Consensus captures the consensus rules for processing a block in the blockchain, +including all blockchain data structures and the rules of the application's +state transition machine. 
+*/ +export interface VersionConsensus { + /** @format uint64 */ + block?: string; + + /** @format uint64 */ + app?: string; +} + +import axios, { AxiosInstance, AxiosRequestConfig, AxiosResponse, ResponseType } from "axios"; + +export type QueryParamsType = Record; + +export interface FullRequestParams extends Omit { + /** set parameter to `true` for call `securityWorker` for this request */ + secure?: boolean; + /** request path */ + path: string; + /** content type of request body */ + type?: ContentType; + /** query params */ + query?: QueryParamsType; + /** format of response (i.e. response.json() -> format: "json") */ + format?: ResponseType; + /** request body */ + body?: unknown; +} + +export type RequestParams = Omit; + +export interface ApiConfig extends Omit { + securityWorker?: ( + securityData: SecurityDataType | null, + ) => Promise | AxiosRequestConfig | void; + secure?: boolean; + format?: ResponseType; +} + +export enum ContentType { + Json = "application/json", + FormData = "multipart/form-data", + UrlEncoded = "application/x-www-form-urlencoded", +} + +export class HttpClient { + public instance: AxiosInstance; + private securityData: SecurityDataType | null = null; + private securityWorker?: ApiConfig["securityWorker"]; + private secure?: boolean; + private format?: ResponseType; + + constructor({ securityWorker, secure, format, ...axiosConfig }: ApiConfig = {}) { + this.instance = axios.create({ ...axiosConfig, baseURL: axiosConfig.baseURL || "" }); + this.secure = secure; + this.format = format; + this.securityWorker = securityWorker; + } + + public setSecurityData = (data: SecurityDataType | null) => { + this.securityData = data; + }; + + private mergeRequestParams(params1: AxiosRequestConfig, params2?: AxiosRequestConfig): AxiosRequestConfig { + return { + ...this.instance.defaults, + ...params1, + ...(params2 || {}), + headers: { + ...(this.instance.defaults.headers || {}), + ...(params1.headers || {}), + ...((params2 && params2.headers) || 
{}), + }, + }; + } + + private createFormData(input: Record): FormData { + return Object.keys(input || {}).reduce((formData, key) => { + const property = input[key]; + formData.append( + key, + property instanceof Blob + ? property + : typeof property === "object" && property !== null + ? JSON.stringify(property) + : `${property}`, + ); + return formData; + }, new FormData()); + } + + public request = async ({ + secure, + path, + type, + query, + format, + body, + ...params + }: FullRequestParams): Promise> => { + const secureParams = + ((typeof secure === "boolean" ? secure : this.secure) && + this.securityWorker && + (await this.securityWorker(this.securityData))) || + {}; + const requestParams = this.mergeRequestParams(params, secureParams); + const responseFormat = (format && this.format) || void 0; + + if (type === ContentType.FormData && body && body !== null && typeof body === "object") { + requestParams.headers.common = { Accept: "*/*" }; + requestParams.headers.post = {}; + requestParams.headers.put = {}; + + body = this.createFormData(body as Record); + } + + return this.instance.request({ + ...requestParams, + headers: { + ...(type && type !== ContentType.FormData ? { "Content-Type": type } : {}), + ...(requestParams.headers || {}), + }, + params: query, + responseType: responseFormat, + data: body, + url: path, + }); + }; +} + +/** + * @title cosmos/base/tendermint/v1beta1/query.proto + * @version version not set + */ +export class Api extends HttpClient { + /** + * @description Since: cosmos-sdk 0.46 + * + * @tags Service + * @name ServiceAbciQuery + * @summary ABCIQuery defines a query handler that supports ABCI queries directly to the +application, bypassing Tendermint completely. The ABCI query must contain +a valid and supported path, including app, custom, p2p, and store. 
+ * @request GET:/cosmos/base/tendermint/v1beta1/abci_query + */ + serviceABCIQuery = ( + query?: { data?: string; path?: string; height?: string; prove?: boolean }, + params: RequestParams = {}, + ) => + this.request({ + path: `/cosmos/base/tendermint/v1beta1/abci_query`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Service + * @name ServiceGetLatestBlock + * @summary GetLatestBlock returns the latest block. + * @request GET:/cosmos/base/tendermint/v1beta1/blocks/latest + */ + serviceGetLatestBlock = (params: RequestParams = {}) => + this.request({ + path: `/cosmos/base/tendermint/v1beta1/blocks/latest`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Service + * @name ServiceGetBlockByHeight + * @summary GetBlockByHeight queries block for given height. + * @request GET:/cosmos/base/tendermint/v1beta1/blocks/{height} + */ + serviceGetBlockByHeight = (height: string, params: RequestParams = {}) => + this.request({ + path: `/cosmos/base/tendermint/v1beta1/blocks/${height}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Service + * @name ServiceGetNodeInfo + * @summary GetNodeInfo queries the current node info. + * @request GET:/cosmos/base/tendermint/v1beta1/node_info + */ + serviceGetNodeInfo = (params: RequestParams = {}) => + this.request({ + path: `/cosmos/base/tendermint/v1beta1/node_info`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Service + * @name ServiceGetSyncing + * @summary GetSyncing queries node syncing. 
+ * @request GET:/cosmos/base/tendermint/v1beta1/syncing + */ + serviceGetSyncing = (params: RequestParams = {}) => + this.request({ + path: `/cosmos/base/tendermint/v1beta1/syncing`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Service + * @name ServiceGetLatestValidatorSet + * @summary GetLatestValidatorSet queries latest validator-set. + * @request GET:/cosmos/base/tendermint/v1beta1/validatorsets/latest + */ + serviceGetLatestValidatorSet = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/cosmos/base/tendermint/v1beta1/validatorsets/latest`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Service + * @name ServiceGetValidatorSetByHeight + * @summary GetValidatorSetByHeight queries validator-set at a given height. 
+ * @request GET:/cosmos/base/tendermint/v1beta1/validatorsets/{height} + */ + serviceGetValidatorSetByHeight = ( + height: string, + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/cosmos/base/tendermint/v1beta1/validatorsets/${height}`, + method: "GET", + query: query, + format: "json", + ...params, + }); +} diff --git a/ts-client/cosmos.base.tendermint.v1beta1/types.ts b/ts-client/cosmos.base.tendermint.v1beta1/types.ts new file mode 100755 index 000000000..eb17bac84 --- /dev/null +++ b/ts-client/cosmos.base.tendermint.v1beta1/types.ts @@ -0,0 +1,19 @@ +import { Validator } from "./types/cosmos/base/tendermint/v1beta1/query" +import { VersionInfo } from "./types/cosmos/base/tendermint/v1beta1/query" +import { Module } from "./types/cosmos/base/tendermint/v1beta1/query" +import { ProofOp } from "./types/cosmos/base/tendermint/v1beta1/query" +import { ProofOps } from "./types/cosmos/base/tendermint/v1beta1/query" +import { Block } from "./types/cosmos/base/tendermint/v1beta1/types" +import { Header } from "./types/cosmos/base/tendermint/v1beta1/types" + + +export { + Validator, + VersionInfo, + Module, + ProofOp, + ProofOps, + Block, + Header, + + } \ No newline at end of file diff --git a/ts-client/cosmos.base.tendermint.v1beta1/types/amino/amino.ts b/ts-client/cosmos.base.tendermint.v1beta1/types/amino/amino.ts new file mode 100644 index 000000000..4b67dcfe4 --- /dev/null +++ b/ts-client/cosmos.base.tendermint.v1beta1/types/amino/amino.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "amino"; diff --git a/ts-client/cosmos.base.tendermint.v1beta1/types/cosmos/base/query/v1beta1/pagination.ts b/ts-client/cosmos.base.tendermint.v1beta1/types/cosmos/base/query/v1beta1/pagination.ts new file mode 100644 index 000000000..a31ca249d --- /dev/null +++ 
b/ts-client/cosmos.base.tendermint.v1beta1/types/cosmos/base/query/v1beta1/pagination.ts @@ -0,0 +1,286 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos.base.query.v1beta1"; + +/** + * PageRequest is to be embedded in gRPC request messages for efficient + * pagination. Ex: + * + * message SomeRequest { + * Foo some_parameter = 1; + * PageRequest pagination = 2; + * } + */ +export interface PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. + */ + key: Uint8Array; + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. + */ + offset: number; + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. + */ + limit: number; + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. + */ + countTotal: boolean; + /** + * reverse is set to true if results are to be returned in the descending order. + * + * Since: cosmos-sdk 0.43 + */ + reverse: boolean; +} + +/** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ +export interface PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. 
+ */ + nextKey: Uint8Array; + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + */ + total: number; +} + +function createBasePageRequest(): PageRequest { + return { key: new Uint8Array(), offset: 0, limit: 0, countTotal: false, reverse: false }; +} + +export const PageRequest = { + encode(message: PageRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + if (message.offset !== 0) { + writer.uint32(16).uint64(message.offset); + } + if (message.limit !== 0) { + writer.uint32(24).uint64(message.limit); + } + if (message.countTotal === true) { + writer.uint32(32).bool(message.countTotal); + } + if (message.reverse === true) { + writer.uint32(40).bool(message.reverse); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PageRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePageRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + case 2: + message.offset = longToNumber(reader.uint64() as Long); + break; + case 3: + message.limit = longToNumber(reader.uint64() as Long); + break; + case 4: + message.countTotal = reader.bool(); + break; + case 5: + message.reverse = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PageRequest { + return { + key: isSet(object.key) ? bytesFromBase64(object.key) : new Uint8Array(), + offset: isSet(object.offset) ? Number(object.offset) : 0, + limit: isSet(object.limit) ? Number(object.limit) : 0, + countTotal: isSet(object.countTotal) ? Boolean(object.countTotal) : false, + reverse: isSet(object.reverse) ? 
Boolean(object.reverse) : false, + }; + }, + + toJSON(message: PageRequest): unknown { + const obj: any = {}; + message.key !== undefined + && (obj.key = base64FromBytes(message.key !== undefined ? message.key : new Uint8Array())); + message.offset !== undefined && (obj.offset = Math.round(message.offset)); + message.limit !== undefined && (obj.limit = Math.round(message.limit)); + message.countTotal !== undefined && (obj.countTotal = message.countTotal); + message.reverse !== undefined && (obj.reverse = message.reverse); + return obj; + }, + + fromPartial, I>>(object: I): PageRequest { + const message = createBasePageRequest(); + message.key = object.key ?? new Uint8Array(); + message.offset = object.offset ?? 0; + message.limit = object.limit ?? 0; + message.countTotal = object.countTotal ?? false; + message.reverse = object.reverse ?? false; + return message; + }, +}; + +function createBasePageResponse(): PageResponse { + return { nextKey: new Uint8Array(), total: 0 }; +} + +export const PageResponse = { + encode(message: PageResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.nextKey.length !== 0) { + writer.uint32(10).bytes(message.nextKey); + } + if (message.total !== 0) { + writer.uint32(16).uint64(message.total); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PageResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePageResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.nextKey = reader.bytes(); + break; + case 2: + message.total = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PageResponse { + return { + nextKey: isSet(object.nextKey) ? 
bytesFromBase64(object.nextKey) : new Uint8Array(), + total: isSet(object.total) ? Number(object.total) : 0, + }; + }, + + toJSON(message: PageResponse): unknown { + const obj: any = {}; + message.nextKey !== undefined + && (obj.nextKey = base64FromBytes(message.nextKey !== undefined ? message.nextKey : new Uint8Array())); + message.total !== undefined && (obj.total = Math.round(message.total)); + return obj; + }, + + fromPartial, I>>(object: I): PageResponse { + const message = createBasePageResponse(); + message.nextKey = object.nextKey ?? new Uint8Array(); + message.total = object.total ?? 0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? 
{ [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.base.tendermint.v1beta1/types/cosmos/base/tendermint/v1beta1/query.ts b/ts-client/cosmos.base.tendermint.v1beta1/types/cosmos/base/tendermint/v1beta1/query.ts new file mode 100644 index 000000000..d351f48d8 --- /dev/null +++ b/ts-client/cosmos.base.tendermint.v1beta1/types/cosmos/base/tendermint/v1beta1/query.ts @@ -0,0 +1,1616 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { Any } from "../../../../google/protobuf/any"; +import { DefaultNodeInfo } from "../../../../tendermint/p2p/types"; +import { Block } from "../../../../tendermint/types/block"; +import { BlockID } from "../../../../tendermint/types/types"; +import { PageRequest, PageResponse } from "../../query/v1beta1/pagination"; +import { Block as Block1 } from "./types"; + +export const protobufPackage = "cosmos.base.tendermint.v1beta1"; + +/** GetValidatorSetByHeightRequest is the request type for the Query/GetValidatorSetByHeight RPC method. */ +export interface GetValidatorSetByHeightRequest { + height: number; + /** pagination defines an pagination for the request. */ + pagination: PageRequest | undefined; +} + +/** GetValidatorSetByHeightResponse is the response type for the Query/GetValidatorSetByHeight RPC method. 
*/ +export interface GetValidatorSetByHeightResponse { + blockHeight: number; + validators: Validator[]; + /** pagination defines an pagination for the response. */ + pagination: PageResponse | undefined; +} + +/** GetLatestValidatorSetRequest is the request type for the Query/GetValidatorSetByHeight RPC method. */ +export interface GetLatestValidatorSetRequest { + /** pagination defines an pagination for the request. */ + pagination: PageRequest | undefined; +} + +/** GetLatestValidatorSetResponse is the response type for the Query/GetValidatorSetByHeight RPC method. */ +export interface GetLatestValidatorSetResponse { + blockHeight: number; + validators: Validator[]; + /** pagination defines an pagination for the response. */ + pagination: PageResponse | undefined; +} + +/** Validator is the type for the validator-set. */ +export interface Validator { + address: string; + pubKey: Any | undefined; + votingPower: number; + proposerPriority: number; +} + +/** GetBlockByHeightRequest is the request type for the Query/GetBlockByHeight RPC method. */ +export interface GetBlockByHeightRequest { + height: number; +} + +/** GetBlockByHeightResponse is the response type for the Query/GetBlockByHeight RPC method. */ +export interface GetBlockByHeightResponse { + blockId: + | BlockID + | undefined; + /** Deprecated: please use `sdk_block` instead */ + block: + | Block + | undefined; + /** Since: cosmos-sdk 0.47 */ + sdkBlock: Block1 | undefined; +} + +/** GetLatestBlockRequest is the request type for the Query/GetLatestBlock RPC method. */ +export interface GetLatestBlockRequest { +} + +/** GetLatestBlockResponse is the response type for the Query/GetLatestBlock RPC method. 
*/ +export interface GetLatestBlockResponse { + blockId: + | BlockID + | undefined; + /** Deprecated: please use `sdk_block` instead */ + block: + | Block + | undefined; + /** Since: cosmos-sdk 0.47 */ + sdkBlock: Block1 | undefined; +} + +/** GetSyncingRequest is the request type for the Query/GetSyncing RPC method. */ +export interface GetSyncingRequest { +} + +/** GetSyncingResponse is the response type for the Query/GetSyncing RPC method. */ +export interface GetSyncingResponse { + syncing: boolean; +} + +/** GetNodeInfoRequest is the request type for the Query/GetNodeInfo RPC method. */ +export interface GetNodeInfoRequest { +} + +/** GetNodeInfoResponse is the response type for the Query/GetNodeInfo RPC method. */ +export interface GetNodeInfoResponse { + defaultNodeInfo: DefaultNodeInfo | undefined; + applicationVersion: VersionInfo | undefined; +} + +/** VersionInfo is the type for the GetNodeInfoResponse message. */ +export interface VersionInfo { + name: string; + appName: string; + version: string; + gitCommit: string; + buildTags: string; + goVersion: string; + buildDeps: Module[]; + /** Since: cosmos-sdk 0.43 */ + cosmosSdkVersion: string; +} + +/** Module is the type for VersionInfo */ +export interface Module { + /** module path */ + path: string; + /** module version */ + version: string; + /** checksum */ + sum: string; +} + +/** ABCIQueryRequest defines the request structure for the ABCIQuery gRPC query. */ +export interface ABCIQueryRequest { + data: Uint8Array; + path: string; + height: number; + prove: boolean; +} + +/** + * ABCIQueryResponse defines the response structure for the ABCIQuery gRPC query. + * + * Note: This type is a duplicate of the ResponseQuery proto type defined in + * Tendermint. 
+ */ +export interface ABCIQueryResponse { + code: number; + /** nondeterministic */ + log: string; + /** nondeterministic */ + info: string; + index: number; + key: Uint8Array; + value: Uint8Array; + proofOps: ProofOps | undefined; + height: number; + codespace: string; +} + +/** + * ProofOp defines an operation used for calculating Merkle root. The data could + * be arbitrary format, providing necessary data for example neighbouring node + * hash. + * + * Note: This type is a duplicate of the ProofOp proto type defined in Tendermint. + */ +export interface ProofOp { + type: string; + key: Uint8Array; + data: Uint8Array; +} + +/** + * ProofOps is Merkle proof defined by the list of ProofOps. + * + * Note: This type is a duplicate of the ProofOps proto type defined in Tendermint. + */ +export interface ProofOps { + ops: ProofOp[]; +} + +function createBaseGetValidatorSetByHeightRequest(): GetValidatorSetByHeightRequest { + return { height: 0, pagination: undefined }; +} + +export const GetValidatorSetByHeightRequest = { + encode(message: GetValidatorSetByHeightRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.height !== 0) { + writer.uint32(8).int64(message.height); + } + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetValidatorSetByHeightRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetValidatorSetByHeightRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.height = longToNumber(reader.int64() as Long); + break; + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GetValidatorSetByHeightRequest { + return { + height: isSet(object.height) ? Number(object.height) : 0, + pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: GetValidatorSetByHeightRequest): unknown { + const obj: any = {}; + message.height !== undefined && (obj.height = Math.round(message.height)); + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): GetValidatorSetByHeightRequest { + const message = createBaseGetValidatorSetByHeightRequest(); + message.height = object.height ?? 0; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? 
PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseGetValidatorSetByHeightResponse(): GetValidatorSetByHeightResponse { + return { blockHeight: 0, validators: [], pagination: undefined }; +} + +export const GetValidatorSetByHeightResponse = { + encode(message: GetValidatorSetByHeightResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.blockHeight !== 0) { + writer.uint32(8).int64(message.blockHeight); + } + for (const v of message.validators) { + Validator.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetValidatorSetByHeightResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetValidatorSetByHeightResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.blockHeight = longToNumber(reader.int64() as Long); + break; + case 2: + message.validators.push(Validator.decode(reader, reader.uint32())); + break; + case 3: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GetValidatorSetByHeightResponse { + return { + blockHeight: isSet(object.blockHeight) ? Number(object.blockHeight) : 0, + validators: Array.isArray(object?.validators) ? object.validators.map((e: any) => Validator.fromJSON(e)) : [], + pagination: isSet(object.pagination) ? 
PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: GetValidatorSetByHeightResponse): unknown { + const obj: any = {}; + message.blockHeight !== undefined && (obj.blockHeight = Math.round(message.blockHeight)); + if (message.validators) { + obj.validators = message.validators.map((e) => e ? Validator.toJSON(e) : undefined); + } else { + obj.validators = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): GetValidatorSetByHeightResponse { + const message = createBaseGetValidatorSetByHeightResponse(); + message.blockHeight = object.blockHeight ?? 0; + message.validators = object.validators?.map((e) => Validator.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseGetLatestValidatorSetRequest(): GetLatestValidatorSetRequest { + return { pagination: undefined }; +} + +export const GetLatestValidatorSetRequest = { + encode(message: GetLatestValidatorSetRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetLatestValidatorSetRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetLatestValidatorSetRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GetLatestValidatorSetRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: GetLatestValidatorSetRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): GetLatestValidatorSetRequest { + const message = createBaseGetLatestValidatorSetRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseGetLatestValidatorSetResponse(): GetLatestValidatorSetResponse { + return { blockHeight: 0, validators: [], pagination: undefined }; +} + +export const GetLatestValidatorSetResponse = { + encode(message: GetLatestValidatorSetResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.blockHeight !== 0) { + writer.uint32(8).int64(message.blockHeight); + } + for (const v of message.validators) { + Validator.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetLatestValidatorSetResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetLatestValidatorSetResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.blockHeight = longToNumber(reader.int64() as Long); + break; + case 2: + message.validators.push(Validator.decode(reader, reader.uint32())); + break; + case 3: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GetLatestValidatorSetResponse { + return { + blockHeight: isSet(object.blockHeight) ? Number(object.blockHeight) : 0, + validators: Array.isArray(object?.validators) ? object.validators.map((e: any) => Validator.fromJSON(e)) : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: GetLatestValidatorSetResponse): unknown { + const obj: any = {}; + message.blockHeight !== undefined && (obj.blockHeight = Math.round(message.blockHeight)); + if (message.validators) { + obj.validators = message.validators.map((e) => e ? Validator.toJSON(e) : undefined); + } else { + obj.validators = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): GetLatestValidatorSetResponse { + const message = createBaseGetLatestValidatorSetResponse(); + message.blockHeight = object.blockHeight ?? 0; + message.validators = object.validators?.map((e) => Validator.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? 
PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseValidator(): Validator { + return { address: "", pubKey: undefined, votingPower: 0, proposerPriority: 0 }; +} + +export const Validator = { + encode(message: Validator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + if (message.pubKey !== undefined) { + Any.encode(message.pubKey, writer.uint32(18).fork()).ldelim(); + } + if (message.votingPower !== 0) { + writer.uint32(24).int64(message.votingPower); + } + if (message.proposerPriority !== 0) { + writer.uint32(32).int64(message.proposerPriority); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Validator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidator(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + case 2: + message.pubKey = Any.decode(reader, reader.uint32()); + break; + case 3: + message.votingPower = longToNumber(reader.int64() as Long); + break; + case 4: + message.proposerPriority = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Validator { + return { + address: isSet(object.address) ? String(object.address) : "", + pubKey: isSet(object.pubKey) ? Any.fromJSON(object.pubKey) : undefined, + votingPower: isSet(object.votingPower) ? Number(object.votingPower) : 0, + proposerPriority: isSet(object.proposerPriority) ? 
Number(object.proposerPriority) : 0, + }; + }, + + toJSON(message: Validator): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + message.pubKey !== undefined && (obj.pubKey = message.pubKey ? Any.toJSON(message.pubKey) : undefined); + message.votingPower !== undefined && (obj.votingPower = Math.round(message.votingPower)); + message.proposerPriority !== undefined && (obj.proposerPriority = Math.round(message.proposerPriority)); + return obj; + }, + + fromPartial, I>>(object: I): Validator { + const message = createBaseValidator(); + message.address = object.address ?? ""; + message.pubKey = (object.pubKey !== undefined && object.pubKey !== null) + ? Any.fromPartial(object.pubKey) + : undefined; + message.votingPower = object.votingPower ?? 0; + message.proposerPriority = object.proposerPriority ?? 0; + return message; + }, +}; + +function createBaseGetBlockByHeightRequest(): GetBlockByHeightRequest { + return { height: 0 }; +} + +export const GetBlockByHeightRequest = { + encode(message: GetBlockByHeightRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.height !== 0) { + writer.uint32(8).int64(message.height); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetBlockByHeightRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetBlockByHeightRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.height = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GetBlockByHeightRequest { + return { height: isSet(object.height) ? 
Number(object.height) : 0 }; + }, + + toJSON(message: GetBlockByHeightRequest): unknown { + const obj: any = {}; + message.height !== undefined && (obj.height = Math.round(message.height)); + return obj; + }, + + fromPartial, I>>(object: I): GetBlockByHeightRequest { + const message = createBaseGetBlockByHeightRequest(); + message.height = object.height ?? 0; + return message; + }, +}; + +function createBaseGetBlockByHeightResponse(): GetBlockByHeightResponse { + return { blockId: undefined, block: undefined, sdkBlock: undefined }; +} + +export const GetBlockByHeightResponse = { + encode(message: GetBlockByHeightResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.blockId !== undefined) { + BlockID.encode(message.blockId, writer.uint32(10).fork()).ldelim(); + } + if (message.block !== undefined) { + Block.encode(message.block, writer.uint32(18).fork()).ldelim(); + } + if (message.sdkBlock !== undefined) { + Block1.encode(message.sdkBlock, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetBlockByHeightResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetBlockByHeightResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.blockId = BlockID.decode(reader, reader.uint32()); + break; + case 2: + message.block = Block.decode(reader, reader.uint32()); + break; + case 3: + message.sdkBlock = Block1.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GetBlockByHeightResponse { + return { + blockId: isSet(object.blockId) ? BlockID.fromJSON(object.blockId) : undefined, + block: isSet(object.block) ? Block.fromJSON(object.block) : undefined, + sdkBlock: isSet(object.sdkBlock) ? 
Block1.fromJSON(object.sdkBlock) : undefined, + }; + }, + + toJSON(message: GetBlockByHeightResponse): unknown { + const obj: any = {}; + message.blockId !== undefined && (obj.blockId = message.blockId ? BlockID.toJSON(message.blockId) : undefined); + message.block !== undefined && (obj.block = message.block ? Block.toJSON(message.block) : undefined); + message.sdkBlock !== undefined && (obj.sdkBlock = message.sdkBlock ? Block1.toJSON(message.sdkBlock) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): GetBlockByHeightResponse { + const message = createBaseGetBlockByHeightResponse(); + message.blockId = (object.blockId !== undefined && object.blockId !== null) + ? BlockID.fromPartial(object.blockId) + : undefined; + message.block = (object.block !== undefined && object.block !== null) ? Block.fromPartial(object.block) : undefined; + message.sdkBlock = (object.sdkBlock !== undefined && object.sdkBlock !== null) + ? Block1.fromPartial(object.sdkBlock) + : undefined; + return message; + }, +}; + +function createBaseGetLatestBlockRequest(): GetLatestBlockRequest { + return {}; +} + +export const GetLatestBlockRequest = { + encode(_: GetLatestBlockRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetLatestBlockRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetLatestBlockRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): GetLatestBlockRequest { + return {}; + }, + + toJSON(_: GetLatestBlockRequest): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): GetLatestBlockRequest { + const message = createBaseGetLatestBlockRequest(); + return message; + }, +}; + +function createBaseGetLatestBlockResponse(): GetLatestBlockResponse { + return { blockId: undefined, block: undefined, sdkBlock: undefined }; +} + +export const GetLatestBlockResponse = { + encode(message: GetLatestBlockResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.blockId !== undefined) { + BlockID.encode(message.blockId, writer.uint32(10).fork()).ldelim(); + } + if (message.block !== undefined) { + Block.encode(message.block, writer.uint32(18).fork()).ldelim(); + } + if (message.sdkBlock !== undefined) { + Block1.encode(message.sdkBlock, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetLatestBlockResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetLatestBlockResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.blockId = BlockID.decode(reader, reader.uint32()); + break; + case 2: + message.block = Block.decode(reader, reader.uint32()); + break; + case 3: + message.sdkBlock = Block1.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GetLatestBlockResponse { + return { + blockId: isSet(object.blockId) ? 
BlockID.fromJSON(object.blockId) : undefined, + block: isSet(object.block) ? Block.fromJSON(object.block) : undefined, + sdkBlock: isSet(object.sdkBlock) ? Block1.fromJSON(object.sdkBlock) : undefined, + }; + }, + + toJSON(message: GetLatestBlockResponse): unknown { + const obj: any = {}; + message.blockId !== undefined && (obj.blockId = message.blockId ? BlockID.toJSON(message.blockId) : undefined); + message.block !== undefined && (obj.block = message.block ? Block.toJSON(message.block) : undefined); + message.sdkBlock !== undefined && (obj.sdkBlock = message.sdkBlock ? Block1.toJSON(message.sdkBlock) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): GetLatestBlockResponse { + const message = createBaseGetLatestBlockResponse(); + message.blockId = (object.blockId !== undefined && object.blockId !== null) + ? BlockID.fromPartial(object.blockId) + : undefined; + message.block = (object.block !== undefined && object.block !== null) ? Block.fromPartial(object.block) : undefined; + message.sdkBlock = (object.sdkBlock !== undefined && object.sdkBlock !== null) + ? Block1.fromPartial(object.sdkBlock) + : undefined; + return message; + }, +}; + +function createBaseGetSyncingRequest(): GetSyncingRequest { + return {}; +} + +export const GetSyncingRequest = { + encode(_: GetSyncingRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetSyncingRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetSyncingRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): GetSyncingRequest { + return {}; + }, + + toJSON(_: GetSyncingRequest): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): GetSyncingRequest { + const message = createBaseGetSyncingRequest(); + return message; + }, +}; + +function createBaseGetSyncingResponse(): GetSyncingResponse { + return { syncing: false }; +} + +export const GetSyncingResponse = { + encode(message: GetSyncingResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.syncing === true) { + writer.uint32(8).bool(message.syncing); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetSyncingResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetSyncingResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.syncing = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GetSyncingResponse { + return { syncing: isSet(object.syncing) ? Boolean(object.syncing) : false }; + }, + + toJSON(message: GetSyncingResponse): unknown { + const obj: any = {}; + message.syncing !== undefined && (obj.syncing = message.syncing); + return obj; + }, + + fromPartial, I>>(object: I): GetSyncingResponse { + const message = createBaseGetSyncingResponse(); + message.syncing = object.syncing ?? 
false; + return message; + }, +}; + +function createBaseGetNodeInfoRequest(): GetNodeInfoRequest { + return {}; +} + +export const GetNodeInfoRequest = { + encode(_: GetNodeInfoRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetNodeInfoRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetNodeInfoRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): GetNodeInfoRequest { + return {}; + }, + + toJSON(_: GetNodeInfoRequest): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): GetNodeInfoRequest { + const message = createBaseGetNodeInfoRequest(); + return message; + }, +}; + +function createBaseGetNodeInfoResponse(): GetNodeInfoResponse { + return { defaultNodeInfo: undefined, applicationVersion: undefined }; +} + +export const GetNodeInfoResponse = { + encode(message: GetNodeInfoResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.defaultNodeInfo !== undefined) { + DefaultNodeInfo.encode(message.defaultNodeInfo, writer.uint32(10).fork()).ldelim(); + } + if (message.applicationVersion !== undefined) { + VersionInfo.encode(message.applicationVersion, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetNodeInfoResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetNodeInfoResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.defaultNodeInfo = DefaultNodeInfo.decode(reader, reader.uint32()); + break; + case 2: + message.applicationVersion = VersionInfo.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GetNodeInfoResponse { + return { + defaultNodeInfo: isSet(object.defaultNodeInfo) ? DefaultNodeInfo.fromJSON(object.defaultNodeInfo) : undefined, + applicationVersion: isSet(object.applicationVersion) + ? VersionInfo.fromJSON(object.applicationVersion) + : undefined, + }; + }, + + toJSON(message: GetNodeInfoResponse): unknown { + const obj: any = {}; + message.defaultNodeInfo !== undefined + && (obj.defaultNodeInfo = message.defaultNodeInfo ? DefaultNodeInfo.toJSON(message.defaultNodeInfo) : undefined); + message.applicationVersion !== undefined && (obj.applicationVersion = message.applicationVersion + ? VersionInfo.toJSON(message.applicationVersion) + : undefined); + return obj; + }, + + fromPartial, I>>(object: I): GetNodeInfoResponse { + const message = createBaseGetNodeInfoResponse(); + message.defaultNodeInfo = (object.defaultNodeInfo !== undefined && object.defaultNodeInfo !== null) + ? DefaultNodeInfo.fromPartial(object.defaultNodeInfo) + : undefined; + message.applicationVersion = (object.applicationVersion !== undefined && object.applicationVersion !== null) + ? 
VersionInfo.fromPartial(object.applicationVersion) + : undefined; + return message; + }, +}; + +function createBaseVersionInfo(): VersionInfo { + return { + name: "", + appName: "", + version: "", + gitCommit: "", + buildTags: "", + goVersion: "", + buildDeps: [], + cosmosSdkVersion: "", + }; +} + +export const VersionInfo = { + encode(message: VersionInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.appName !== "") { + writer.uint32(18).string(message.appName); + } + if (message.version !== "") { + writer.uint32(26).string(message.version); + } + if (message.gitCommit !== "") { + writer.uint32(34).string(message.gitCommit); + } + if (message.buildTags !== "") { + writer.uint32(42).string(message.buildTags); + } + if (message.goVersion !== "") { + writer.uint32(50).string(message.goVersion); + } + for (const v of message.buildDeps) { + Module.encode(v!, writer.uint32(58).fork()).ldelim(); + } + if (message.cosmosSdkVersion !== "") { + writer.uint32(66).string(message.cosmosSdkVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): VersionInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseVersionInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.appName = reader.string(); + break; + case 3: + message.version = reader.string(); + break; + case 4: + message.gitCommit = reader.string(); + break; + case 5: + message.buildTags = reader.string(); + break; + case 6: + message.goVersion = reader.string(); + break; + case 7: + message.buildDeps.push(Module.decode(reader, reader.uint32())); + break; + case 8: + message.cosmosSdkVersion = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): VersionInfo { + return { + name: isSet(object.name) ? String(object.name) : "", + appName: isSet(object.appName) ? String(object.appName) : "", + version: isSet(object.version) ? String(object.version) : "", + gitCommit: isSet(object.gitCommit) ? String(object.gitCommit) : "", + buildTags: isSet(object.buildTags) ? String(object.buildTags) : "", + goVersion: isSet(object.goVersion) ? String(object.goVersion) : "", + buildDeps: Array.isArray(object?.buildDeps) ? object.buildDeps.map((e: any) => Module.fromJSON(e)) : [], + cosmosSdkVersion: isSet(object.cosmosSdkVersion) ? String(object.cosmosSdkVersion) : "", + }; + }, + + toJSON(message: VersionInfo): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.appName !== undefined && (obj.appName = message.appName); + message.version !== undefined && (obj.version = message.version); + message.gitCommit !== undefined && (obj.gitCommit = message.gitCommit); + message.buildTags !== undefined && (obj.buildTags = message.buildTags); + message.goVersion !== undefined && (obj.goVersion = message.goVersion); + if (message.buildDeps) { + obj.buildDeps = message.buildDeps.map((e) => e ? 
Module.toJSON(e) : undefined); + } else { + obj.buildDeps = []; + } + message.cosmosSdkVersion !== undefined && (obj.cosmosSdkVersion = message.cosmosSdkVersion); + return obj; + }, + + fromPartial, I>>(object: I): VersionInfo { + const message = createBaseVersionInfo(); + message.name = object.name ?? ""; + message.appName = object.appName ?? ""; + message.version = object.version ?? ""; + message.gitCommit = object.gitCommit ?? ""; + message.buildTags = object.buildTags ?? ""; + message.goVersion = object.goVersion ?? ""; + message.buildDeps = object.buildDeps?.map((e) => Module.fromPartial(e)) || []; + message.cosmosSdkVersion = object.cosmosSdkVersion ?? ""; + return message; + }, +}; + +function createBaseModule(): Module { + return { path: "", version: "", sum: "" }; +} + +export const Module = { + encode(message: Module, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.path !== "") { + writer.uint32(10).string(message.path); + } + if (message.version !== "") { + writer.uint32(18).string(message.version); + } + if (message.sum !== "") { + writer.uint32(26).string(message.sum); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Module { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseModule(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.path = reader.string(); + break; + case 2: + message.version = reader.string(); + break; + case 3: + message.sum = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Module { + return { + path: isSet(object.path) ? String(object.path) : "", + version: isSet(object.version) ? String(object.version) : "", + sum: isSet(object.sum) ? 
String(object.sum) : "", + }; + }, + + toJSON(message: Module): unknown { + const obj: any = {}; + message.path !== undefined && (obj.path = message.path); + message.version !== undefined && (obj.version = message.version); + message.sum !== undefined && (obj.sum = message.sum); + return obj; + }, + + fromPartial, I>>(object: I): Module { + const message = createBaseModule(); + message.path = object.path ?? ""; + message.version = object.version ?? ""; + message.sum = object.sum ?? ""; + return message; + }, +}; + +function createBaseABCIQueryRequest(): ABCIQueryRequest { + return { data: new Uint8Array(), path: "", height: 0, prove: false }; +} + +export const ABCIQueryRequest = { + encode(message: ABCIQueryRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.data.length !== 0) { + writer.uint32(10).bytes(message.data); + } + if (message.path !== "") { + writer.uint32(18).string(message.path); + } + if (message.height !== 0) { + writer.uint32(24).int64(message.height); + } + if (message.prove === true) { + writer.uint32(32).bool(message.prove); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ABCIQueryRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseABCIQueryRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.data = reader.bytes(); + break; + case 2: + message.path = reader.string(); + break; + case 3: + message.height = longToNumber(reader.int64() as Long); + break; + case 4: + message.prove = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ABCIQueryRequest { + return { + data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(), + path: isSet(object.path) ? 
String(object.path) : "", + height: isSet(object.height) ? Number(object.height) : 0, + prove: isSet(object.prove) ? Boolean(object.prove) : false, + }; + }, + + toJSON(message: ABCIQueryRequest): unknown { + const obj: any = {}; + message.data !== undefined + && (obj.data = base64FromBytes(message.data !== undefined ? message.data : new Uint8Array())); + message.path !== undefined && (obj.path = message.path); + message.height !== undefined && (obj.height = Math.round(message.height)); + message.prove !== undefined && (obj.prove = message.prove); + return obj; + }, + + fromPartial, I>>(object: I): ABCIQueryRequest { + const message = createBaseABCIQueryRequest(); + message.data = object.data ?? new Uint8Array(); + message.path = object.path ?? ""; + message.height = object.height ?? 0; + message.prove = object.prove ?? false; + return message; + }, +}; + +function createBaseABCIQueryResponse(): ABCIQueryResponse { + return { + code: 0, + log: "", + info: "", + index: 0, + key: new Uint8Array(), + value: new Uint8Array(), + proofOps: undefined, + height: 0, + codespace: "", + }; +} + +export const ABCIQueryResponse = { + encode(message: ABCIQueryResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.code !== 0) { + writer.uint32(8).uint32(message.code); + } + if (message.log !== "") { + writer.uint32(26).string(message.log); + } + if (message.info !== "") { + writer.uint32(34).string(message.info); + } + if (message.index !== 0) { + writer.uint32(40).int64(message.index); + } + if (message.key.length !== 0) { + writer.uint32(50).bytes(message.key); + } + if (message.value.length !== 0) { + writer.uint32(58).bytes(message.value); + } + if (message.proofOps !== undefined) { + ProofOps.encode(message.proofOps, writer.uint32(66).fork()).ldelim(); + } + if (message.height !== 0) { + writer.uint32(72).int64(message.height); + } + if (message.codespace !== "") { + writer.uint32(82).string(message.codespace); + } + return writer; + }, + + 
decode(input: _m0.Reader | Uint8Array, length?: number): ABCIQueryResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseABCIQueryResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.code = reader.uint32(); + break; + case 3: + message.log = reader.string(); + break; + case 4: + message.info = reader.string(); + break; + case 5: + message.index = longToNumber(reader.int64() as Long); + break; + case 6: + message.key = reader.bytes(); + break; + case 7: + message.value = reader.bytes(); + break; + case 8: + message.proofOps = ProofOps.decode(reader, reader.uint32()); + break; + case 9: + message.height = longToNumber(reader.int64() as Long); + break; + case 10: + message.codespace = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ABCIQueryResponse { + return { + code: isSet(object.code) ? Number(object.code) : 0, + log: isSet(object.log) ? String(object.log) : "", + info: isSet(object.info) ? String(object.info) : "", + index: isSet(object.index) ? Number(object.index) : 0, + key: isSet(object.key) ? bytesFromBase64(object.key) : new Uint8Array(), + value: isSet(object.value) ? bytesFromBase64(object.value) : new Uint8Array(), + proofOps: isSet(object.proofOps) ? ProofOps.fromJSON(object.proofOps) : undefined, + height: isSet(object.height) ? Number(object.height) : 0, + codespace: isSet(object.codespace) ? 
String(object.codespace) : "", + }; + }, + + toJSON(message: ABCIQueryResponse): unknown { + const obj: any = {}; + message.code !== undefined && (obj.code = Math.round(message.code)); + message.log !== undefined && (obj.log = message.log); + message.info !== undefined && (obj.info = message.info); + message.index !== undefined && (obj.index = Math.round(message.index)); + message.key !== undefined + && (obj.key = base64FromBytes(message.key !== undefined ? message.key : new Uint8Array())); + message.value !== undefined + && (obj.value = base64FromBytes(message.value !== undefined ? message.value : new Uint8Array())); + message.proofOps !== undefined && (obj.proofOps = message.proofOps ? ProofOps.toJSON(message.proofOps) : undefined); + message.height !== undefined && (obj.height = Math.round(message.height)); + message.codespace !== undefined && (obj.codespace = message.codespace); + return obj; + }, + + fromPartial, I>>(object: I): ABCIQueryResponse { + const message = createBaseABCIQueryResponse(); + message.code = object.code ?? 0; + message.log = object.log ?? ""; + message.info = object.info ?? ""; + message.index = object.index ?? 0; + message.key = object.key ?? new Uint8Array(); + message.value = object.value ?? new Uint8Array(); + message.proofOps = (object.proofOps !== undefined && object.proofOps !== null) + ? ProofOps.fromPartial(object.proofOps) + : undefined; + message.height = object.height ?? 0; + message.codespace = object.codespace ?? 
""; + return message; + }, +}; + +function createBaseProofOp(): ProofOp { + return { type: "", key: new Uint8Array(), data: new Uint8Array() }; +} + +export const ProofOp = { + encode(message: ProofOp, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.type !== "") { + writer.uint32(10).string(message.type); + } + if (message.key.length !== 0) { + writer.uint32(18).bytes(message.key); + } + if (message.data.length !== 0) { + writer.uint32(26).bytes(message.data); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ProofOp { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseProofOp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.type = reader.string(); + break; + case 2: + message.key = reader.bytes(); + break; + case 3: + message.data = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ProofOp { + return { + type: isSet(object.type) ? String(object.type) : "", + key: isSet(object.key) ? bytesFromBase64(object.key) : new Uint8Array(), + data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(), + }; + }, + + toJSON(message: ProofOp): unknown { + const obj: any = {}; + message.type !== undefined && (obj.type = message.type); + message.key !== undefined + && (obj.key = base64FromBytes(message.key !== undefined ? message.key : new Uint8Array())); + message.data !== undefined + && (obj.data = base64FromBytes(message.data !== undefined ? message.data : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): ProofOp { + const message = createBaseProofOp(); + message.type = object.type ?? ""; + message.key = object.key ?? new Uint8Array(); + message.data = object.data ?? 
new Uint8Array(); + return message; + }, +}; + +function createBaseProofOps(): ProofOps { + return { ops: [] }; +} + +export const ProofOps = { + encode(message: ProofOps, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.ops) { + ProofOp.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ProofOps { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseProofOps(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.ops.push(ProofOp.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ProofOps { + return { ops: Array.isArray(object?.ops) ? object.ops.map((e: any) => ProofOp.fromJSON(e)) : [] }; + }, + + toJSON(message: ProofOps): unknown { + const obj: any = {}; + if (message.ops) { + obj.ops = message.ops.map((e) => e ? ProofOp.toJSON(e) : undefined); + } else { + obj.ops = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ProofOps { + const message = createBaseProofOps(); + message.ops = object.ops?.map((e) => ProofOp.fromPartial(e)) || []; + return message; + }, +}; + +/** Service defines the gRPC querier service for tendermint queries. */ +export interface Service { + /** GetNodeInfo queries the current node info. */ + GetNodeInfo(request: GetNodeInfoRequest): Promise; + /** GetSyncing queries node syncing. */ + GetSyncing(request: GetSyncingRequest): Promise; + /** GetLatestBlock returns the latest block. */ + GetLatestBlock(request: GetLatestBlockRequest): Promise; + /** GetBlockByHeight queries block for given height. */ + GetBlockByHeight(request: GetBlockByHeightRequest): Promise; + /** GetLatestValidatorSet queries latest validator-set. 
*/ + GetLatestValidatorSet(request: GetLatestValidatorSetRequest): Promise; + /** GetValidatorSetByHeight queries validator-set at a given height. */ + GetValidatorSetByHeight(request: GetValidatorSetByHeightRequest): Promise; + /** + * ABCIQuery defines a query handler that supports ABCI queries directly to the + * application, bypassing Tendermint completely. The ABCI query must contain + * a valid and supported path, including app, custom, p2p, and store. + * + * Since: cosmos-sdk 0.46 + */ + ABCIQuery(request: ABCIQueryRequest): Promise; +} + +export class ServiceClientImpl implements Service { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.GetNodeInfo = this.GetNodeInfo.bind(this); + this.GetSyncing = this.GetSyncing.bind(this); + this.GetLatestBlock = this.GetLatestBlock.bind(this); + this.GetBlockByHeight = this.GetBlockByHeight.bind(this); + this.GetLatestValidatorSet = this.GetLatestValidatorSet.bind(this); + this.GetValidatorSetByHeight = this.GetValidatorSetByHeight.bind(this); + this.ABCIQuery = this.ABCIQuery.bind(this); + } + GetNodeInfo(request: GetNodeInfoRequest): Promise { + const data = GetNodeInfoRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.base.tendermint.v1beta1.Service", "GetNodeInfo", data); + return promise.then((data) => GetNodeInfoResponse.decode(new _m0.Reader(data))); + } + + GetSyncing(request: GetSyncingRequest): Promise { + const data = GetSyncingRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.base.tendermint.v1beta1.Service", "GetSyncing", data); + return promise.then((data) => GetSyncingResponse.decode(new _m0.Reader(data))); + } + + GetLatestBlock(request: GetLatestBlockRequest): Promise { + const data = GetLatestBlockRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.base.tendermint.v1beta1.Service", "GetLatestBlock", data); + return promise.then((data) => GetLatestBlockResponse.decode(new _m0.Reader(data))); + 
} + + GetBlockByHeight(request: GetBlockByHeightRequest): Promise { + const data = GetBlockByHeightRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.base.tendermint.v1beta1.Service", "GetBlockByHeight", data); + return promise.then((data) => GetBlockByHeightResponse.decode(new _m0.Reader(data))); + } + + GetLatestValidatorSet(request: GetLatestValidatorSetRequest): Promise { + const data = GetLatestValidatorSetRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.base.tendermint.v1beta1.Service", "GetLatestValidatorSet", data); + return promise.then((data) => GetLatestValidatorSetResponse.decode(new _m0.Reader(data))); + } + + GetValidatorSetByHeight(request: GetValidatorSetByHeightRequest): Promise { + const data = GetValidatorSetByHeightRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.base.tendermint.v1beta1.Service", "GetValidatorSetByHeight", data); + return promise.then((data) => GetValidatorSetByHeightResponse.decode(new _m0.Reader(data))); + } + + ABCIQuery(request: ABCIQueryRequest): Promise { + const data = ABCIQueryRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.base.tendermint.v1beta1.Service", "ABCIQuery", data); + return promise.then((data) => ABCIQueryResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return 
Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.base.tendermint.v1beta1/types/cosmos/base/tendermint/v1beta1/types.ts b/ts-client/cosmos.base.tendermint.v1beta1/types/cosmos/base/tendermint/v1beta1/types.ts new file mode 100644 index 000000000..3df6f894e --- /dev/null +++ b/ts-client/cosmos.base.tendermint.v1beta1/types/cosmos/base/tendermint/v1beta1/types.ts @@ -0,0 +1,442 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { Timestamp } from "../../../../google/protobuf/timestamp"; +import { EvidenceList } from "../../../../tendermint/types/evidence"; +import { BlockID, Commit, Data } from 
"../../../../tendermint/types/types"; +import { Consensus } from "../../../../tendermint/version/types"; + +export const protobufPackage = "cosmos.base.tendermint.v1beta1"; + +/** + * Block is tendermint type Block, with the Header proposer address + * field converted to bech32 string. + */ +export interface Block { + header: Header | undefined; + data: Data | undefined; + evidence: EvidenceList | undefined; + lastCommit: Commit | undefined; +} + +/** Header defines the structure of a Tendermint block header. */ +export interface Header { + /** basic block info */ + version: Consensus | undefined; + chainId: string; + height: number; + time: + | Date + | undefined; + /** prev block info */ + lastBlockId: + | BlockID + | undefined; + /** hashes of block data */ + lastCommitHash: Uint8Array; + /** transactions */ + dataHash: Uint8Array; + /** hashes from the app output from the prev block */ + validatorsHash: Uint8Array; + /** validators for the next block */ + nextValidatorsHash: Uint8Array; + /** consensus params for current block */ + consensusHash: Uint8Array; + /** state after txs from the previous block */ + appHash: Uint8Array; + /** root hash of all results from the txs from the previous block */ + lastResultsHash: Uint8Array; + /** consensus info */ + evidenceHash: Uint8Array; + /** + * proposer_address is the original block proposer address, formatted as a Bech32 string. + * In Tendermint, this type is `bytes`, but in the SDK, we convert it to a Bech32 string + * for better UX. 
+ */ + proposerAddress: string; +} + +function createBaseBlock(): Block { + return { header: undefined, data: undefined, evidence: undefined, lastCommit: undefined }; +} + +export const Block = { + encode(message: Block, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.header !== undefined) { + Header.encode(message.header, writer.uint32(10).fork()).ldelim(); + } + if (message.data !== undefined) { + Data.encode(message.data, writer.uint32(18).fork()).ldelim(); + } + if (message.evidence !== undefined) { + EvidenceList.encode(message.evidence, writer.uint32(26).fork()).ldelim(); + } + if (message.lastCommit !== undefined) { + Commit.encode(message.lastCommit, writer.uint32(34).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Block { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBlock(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.header = Header.decode(reader, reader.uint32()); + break; + case 2: + message.data = Data.decode(reader, reader.uint32()); + break; + case 3: + message.evidence = EvidenceList.decode(reader, reader.uint32()); + break; + case 4: + message.lastCommit = Commit.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Block { + return { + header: isSet(object.header) ? Header.fromJSON(object.header) : undefined, + data: isSet(object.data) ? Data.fromJSON(object.data) : undefined, + evidence: isSet(object.evidence) ? EvidenceList.fromJSON(object.evidence) : undefined, + lastCommit: isSet(object.lastCommit) ? Commit.fromJSON(object.lastCommit) : undefined, + }; + }, + + toJSON(message: Block): unknown { + const obj: any = {}; + message.header !== undefined && (obj.header = message.header ? 
Header.toJSON(message.header) : undefined); + message.data !== undefined && (obj.data = message.data ? Data.toJSON(message.data) : undefined); + message.evidence !== undefined + && (obj.evidence = message.evidence ? EvidenceList.toJSON(message.evidence) : undefined); + message.lastCommit !== undefined + && (obj.lastCommit = message.lastCommit ? Commit.toJSON(message.lastCommit) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): Block { + const message = createBaseBlock(); + message.header = (object.header !== undefined && object.header !== null) + ? Header.fromPartial(object.header) + : undefined; + message.data = (object.data !== undefined && object.data !== null) ? Data.fromPartial(object.data) : undefined; + message.evidence = (object.evidence !== undefined && object.evidence !== null) + ? EvidenceList.fromPartial(object.evidence) + : undefined; + message.lastCommit = (object.lastCommit !== undefined && object.lastCommit !== null) + ? Commit.fromPartial(object.lastCommit) + : undefined; + return message; + }, +}; + +function createBaseHeader(): Header { + return { + version: undefined, + chainId: "", + height: 0, + time: undefined, + lastBlockId: undefined, + lastCommitHash: new Uint8Array(), + dataHash: new Uint8Array(), + validatorsHash: new Uint8Array(), + nextValidatorsHash: new Uint8Array(), + consensusHash: new Uint8Array(), + appHash: new Uint8Array(), + lastResultsHash: new Uint8Array(), + evidenceHash: new Uint8Array(), + proposerAddress: "", + }; +} + +export const Header = { + encode(message: Header, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.version !== undefined) { + Consensus.encode(message.version, writer.uint32(10).fork()).ldelim(); + } + if (message.chainId !== "") { + writer.uint32(18).string(message.chainId); + } + if (message.height !== 0) { + writer.uint32(24).int64(message.height); + } + if (message.time !== undefined) { + Timestamp.encode(toTimestamp(message.time), 
writer.uint32(34).fork()).ldelim(); + } + if (message.lastBlockId !== undefined) { + BlockID.encode(message.lastBlockId, writer.uint32(42).fork()).ldelim(); + } + if (message.lastCommitHash.length !== 0) { + writer.uint32(50).bytes(message.lastCommitHash); + } + if (message.dataHash.length !== 0) { + writer.uint32(58).bytes(message.dataHash); + } + if (message.validatorsHash.length !== 0) { + writer.uint32(66).bytes(message.validatorsHash); + } + if (message.nextValidatorsHash.length !== 0) { + writer.uint32(74).bytes(message.nextValidatorsHash); + } + if (message.consensusHash.length !== 0) { + writer.uint32(82).bytes(message.consensusHash); + } + if (message.appHash.length !== 0) { + writer.uint32(90).bytes(message.appHash); + } + if (message.lastResultsHash.length !== 0) { + writer.uint32(98).bytes(message.lastResultsHash); + } + if (message.evidenceHash.length !== 0) { + writer.uint32(106).bytes(message.evidenceHash); + } + if (message.proposerAddress !== "") { + writer.uint32(114).string(message.proposerAddress); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Header { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseHeader(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.version = Consensus.decode(reader, reader.uint32()); + break; + case 2: + message.chainId = reader.string(); + break; + case 3: + message.height = longToNumber(reader.int64() as Long); + break; + case 4: + message.time = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + case 5: + message.lastBlockId = BlockID.decode(reader, reader.uint32()); + break; + case 6: + message.lastCommitHash = reader.bytes(); + break; + case 7: + message.dataHash = reader.bytes(); + break; + case 8: + message.validatorsHash = reader.bytes(); + break; + case 9: + message.nextValidatorsHash = reader.bytes(); + break; + case 10: + message.consensusHash = reader.bytes(); + break; + case 11: + message.appHash = reader.bytes(); + break; + case 12: + message.lastResultsHash = reader.bytes(); + break; + case 13: + message.evidenceHash = reader.bytes(); + break; + case 14: + message.proposerAddress = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Header { + return { + version: isSet(object.version) ? Consensus.fromJSON(object.version) : undefined, + chainId: isSet(object.chainId) ? String(object.chainId) : "", + height: isSet(object.height) ? Number(object.height) : 0, + time: isSet(object.time) ? fromJsonTimestamp(object.time) : undefined, + lastBlockId: isSet(object.lastBlockId) ? BlockID.fromJSON(object.lastBlockId) : undefined, + lastCommitHash: isSet(object.lastCommitHash) ? bytesFromBase64(object.lastCommitHash) : new Uint8Array(), + dataHash: isSet(object.dataHash) ? bytesFromBase64(object.dataHash) : new Uint8Array(), + validatorsHash: isSet(object.validatorsHash) ? bytesFromBase64(object.validatorsHash) : new Uint8Array(), + nextValidatorsHash: isSet(object.nextValidatorsHash) + ? 
bytesFromBase64(object.nextValidatorsHash) + : new Uint8Array(), + consensusHash: isSet(object.consensusHash) ? bytesFromBase64(object.consensusHash) : new Uint8Array(), + appHash: isSet(object.appHash) ? bytesFromBase64(object.appHash) : new Uint8Array(), + lastResultsHash: isSet(object.lastResultsHash) ? bytesFromBase64(object.lastResultsHash) : new Uint8Array(), + evidenceHash: isSet(object.evidenceHash) ? bytesFromBase64(object.evidenceHash) : new Uint8Array(), + proposerAddress: isSet(object.proposerAddress) ? String(object.proposerAddress) : "", + }; + }, + + toJSON(message: Header): unknown { + const obj: any = {}; + message.version !== undefined && (obj.version = message.version ? Consensus.toJSON(message.version) : undefined); + message.chainId !== undefined && (obj.chainId = message.chainId); + message.height !== undefined && (obj.height = Math.round(message.height)); + message.time !== undefined && (obj.time = message.time.toISOString()); + message.lastBlockId !== undefined + && (obj.lastBlockId = message.lastBlockId ? BlockID.toJSON(message.lastBlockId) : undefined); + message.lastCommitHash !== undefined + && (obj.lastCommitHash = base64FromBytes( + message.lastCommitHash !== undefined ? message.lastCommitHash : new Uint8Array(), + )); + message.dataHash !== undefined + && (obj.dataHash = base64FromBytes(message.dataHash !== undefined ? message.dataHash : new Uint8Array())); + message.validatorsHash !== undefined + && (obj.validatorsHash = base64FromBytes( + message.validatorsHash !== undefined ? message.validatorsHash : new Uint8Array(), + )); + message.nextValidatorsHash !== undefined + && (obj.nextValidatorsHash = base64FromBytes( + message.nextValidatorsHash !== undefined ? message.nextValidatorsHash : new Uint8Array(), + )); + message.consensusHash !== undefined + && (obj.consensusHash = base64FromBytes( + message.consensusHash !== undefined ? 
message.consensusHash : new Uint8Array(), + )); + message.appHash !== undefined + && (obj.appHash = base64FromBytes(message.appHash !== undefined ? message.appHash : new Uint8Array())); + message.lastResultsHash !== undefined + && (obj.lastResultsHash = base64FromBytes( + message.lastResultsHash !== undefined ? message.lastResultsHash : new Uint8Array(), + )); + message.evidenceHash !== undefined + && (obj.evidenceHash = base64FromBytes( + message.evidenceHash !== undefined ? message.evidenceHash : new Uint8Array(), + )); + message.proposerAddress !== undefined && (obj.proposerAddress = message.proposerAddress); + return obj; + }, + + fromPartial, I>>(object: I): Header { + const message = createBaseHeader(); + message.version = (object.version !== undefined && object.version !== null) + ? Consensus.fromPartial(object.version) + : undefined; + message.chainId = object.chainId ?? ""; + message.height = object.height ?? 0; + message.time = object.time ?? undefined; + message.lastBlockId = (object.lastBlockId !== undefined && object.lastBlockId !== null) + ? BlockID.fromPartial(object.lastBlockId) + : undefined; + message.lastCommitHash = object.lastCommitHash ?? new Uint8Array(); + message.dataHash = object.dataHash ?? new Uint8Array(); + message.validatorsHash = object.validatorsHash ?? new Uint8Array(); + message.nextValidatorsHash = object.nextValidatorsHash ?? new Uint8Array(); + message.consensusHash = object.consensusHash ?? new Uint8Array(); + message.appHash = object.appHash ?? new Uint8Array(); + message.lastResultsHash = object.lastResultsHash ?? new Uint8Array(); + message.evidenceHash = object.evidenceHash ?? new Uint8Array(); + message.proposerAddress = object.proposerAddress ?? 
""; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function toTimestamp(date: Date): Timestamp { + const seconds = date.getTime() / 1_000; + const nanos = (date.getTime() % 1_000) * 1_000_000; + return { seconds, nanos }; +} + +function fromTimestamp(t: Timestamp): Date { + let millis = t.seconds * 1_000; + millis += t.nanos / 1_000_000; + return new Date(millis); +} + +function fromJsonTimestamp(o: any): Date { + if (o instanceof Date) { + return o; + } else if (typeof o === "string") { + return new Date(o); + } else { + return fromTimestamp(Timestamp.fromJSON(o)); + } +} + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.base.tendermint.v1beta1/types/cosmos_proto/cosmos.ts b/ts-client/cosmos.base.tendermint.v1beta1/types/cosmos_proto/cosmos.ts new file mode 100644 index 000000000..79c8d3486 --- /dev/null +++ b/ts-client/cosmos.base.tendermint.v1beta1/types/cosmos_proto/cosmos.ts @@ -0,0 +1,247 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos_proto"; + +export enum ScalarType { + SCALAR_TYPE_UNSPECIFIED = 0, + SCALAR_TYPE_STRING = 1, + SCALAR_TYPE_BYTES = 2, + UNRECOGNIZED = -1, +} + +export function scalarTypeFromJSON(object: any): ScalarType { + switch (object) { + case 0: + case "SCALAR_TYPE_UNSPECIFIED": + return ScalarType.SCALAR_TYPE_UNSPECIFIED; + case 1: + case "SCALAR_TYPE_STRING": + return ScalarType.SCALAR_TYPE_STRING; + case 2: + case "SCALAR_TYPE_BYTES": + return ScalarType.SCALAR_TYPE_BYTES; + case -1: + case "UNRECOGNIZED": + default: + return ScalarType.UNRECOGNIZED; + } +} + +export function scalarTypeToJSON(object: 
ScalarType): string { + switch (object) { + case ScalarType.SCALAR_TYPE_UNSPECIFIED: + return "SCALAR_TYPE_UNSPECIFIED"; + case ScalarType.SCALAR_TYPE_STRING: + return "SCALAR_TYPE_STRING"; + case ScalarType.SCALAR_TYPE_BYTES: + return "SCALAR_TYPE_BYTES"; + case ScalarType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * InterfaceDescriptor describes an interface type to be used with + * accepts_interface and implements_interface and declared by declare_interface. + */ +export interface InterfaceDescriptor { + /** + * name is the name of the interface. It should be a short-name (without + * a period) such that the fully qualified name of the interface will be + * package.name, ex. for the package a.b and interface named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the interface and its + * purpose. + */ + description: string; +} + +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. + * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. + * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ +export interface ScalarDescriptor { + /** + * name is the name of the scalar. It should be a short-name (without + * a period) such that the fully qualified name of the scalar will be + * package.name, ex. for the package a.b and scalar named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the scalar and its + * encoding format. For instance a big integer or decimal scalar should + * specify precisely the expected encoding format. 
+ */ + description: string; + /** + * field_type is the type of field with which this scalar can be used. + * Scalars can be used with one and only one type of field so that + * encoding standards and simple and clear. Currently only string and + * bytes fields are supported for scalars. + */ + fieldType: ScalarType[]; +} + +function createBaseInterfaceDescriptor(): InterfaceDescriptor { + return { name: "", description: "" }; +} + +export const InterfaceDescriptor = { + encode(message: InterfaceDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseInterfaceDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): InterfaceDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + }; + }, + + toJSON(message: InterfaceDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + return obj; + }, + + fromPartial, I>>(object: I): InterfaceDescriptor { + const message = createBaseInterfaceDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? 
""; + return message; + }, +}; + +function createBaseScalarDescriptor(): ScalarDescriptor { + return { name: "", description: "", fieldType: [] }; +} + +export const ScalarDescriptor = { + encode(message: ScalarDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + writer.uint32(26).fork(); + for (const v of message.fieldType) { + writer.int32(v); + } + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ScalarDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseScalarDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.fieldType.push(reader.int32() as any); + } + } else { + message.fieldType.push(reader.int32() as any); + } + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ScalarDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + fieldType: Array.isArray(object?.fieldType) ? 
object.fieldType.map((e: any) => scalarTypeFromJSON(e)) : [], + }; + }, + + toJSON(message: ScalarDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + if (message.fieldType) { + obj.fieldType = message.fieldType.map((e) => scalarTypeToJSON(e)); + } else { + obj.fieldType = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ScalarDescriptor { + const message = createBaseScalarDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? ""; + message.fieldType = object.fieldType?.map((e) => e) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.base.tendermint.v1beta1/types/gogoproto/gogo.ts b/ts-client/cosmos.base.tendermint.v1beta1/types/gogoproto/gogo.ts new file mode 100644 index 000000000..3f41a047a --- /dev/null +++ b/ts-client/cosmos.base.tendermint.v1beta1/types/gogoproto/gogo.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "gogoproto"; diff --git a/ts-client/cosmos.base.tendermint.v1beta1/types/google/api/annotations.ts b/ts-client/cosmos.base.tendermint.v1beta1/types/google/api/annotations.ts new file mode 100644 index 000000000..aace4787f --- /dev/null +++ b/ts-client/cosmos.base.tendermint.v1beta1/types/google/api/annotations.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "google.api"; diff --git a/ts-client/cosmos.base.tendermint.v1beta1/types/google/api/http.ts b/ts-client/cosmos.base.tendermint.v1beta1/types/google/api/http.ts new file mode 100644 index 000000000..17e770d15 --- /dev/null +++ b/ts-client/cosmos.base.tendermint.v1beta1/types/google/api/http.ts @@ -0,0 +1,589 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.api"; + +/** + * Defines the HTTP configuration for an API service. It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface Http { + /** + * A list of HTTP configuration rules that apply to individual API methods. + * + * **NOTE:** All service configuration rules follow "last one wins" order. + */ + rules: HttpRule[]; + /** + * When set to true, URL path parmeters will be fully URI-decoded except in + * cases of single segment matches in reserved expansion, where "%2F" will be + * left encoded. 
+ * + * The default behavior is to not decode RFC 6570 reserved characters in multi + * segment matches. + */ + fullyDecodeReservedExpansion: boolean; +} + +/** + * `HttpRule` defines the mapping of an RPC method to one or more HTTP + * REST API methods. The mapping specifies how different portions of the RPC + * request message are mapped to URL path, URL query parameters, and + * HTTP request body. The mapping is typically specified as an + * `google.api.http` annotation on the RPC method, + * see "google/api/annotations.proto" for details. + * + * The mapping consists of a field specifying the path template and + * method kind. The path template can refer to fields in the request + * message, as in the example below which describes a REST GET + * operation on a resource collection of messages: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}/{sub.subfield}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * SubMessage sub = 2; // `sub.subfield` is url-mapped + * } + * message Message { + * string text = 1; // content of the resource + * } + * + * The same http annotation can alternatively be expressed inside the + * `GRPC API Configuration` YAML file. + * + * http: + * rules: + * - selector: .Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * This definition enables an automatic, bidrectional mapping of HTTP + * JSON to RPC. Example: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456/foo` | `GetMessage(message_id: "123456" sub: SubMessage(subfield: "foo"))` + * + * In general, not only fields but also field paths can be referenced + * from a path pattern. Fields mapped to the path pattern cannot be + * repeated and must have a primitive (non-message) type. 
+ * + * Any fields in the request message which are not bound by the path + * pattern automatically become (optional) HTTP query + * parameters. Assume the following definition of the request message: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * int64 revision = 2; // becomes a parameter + * SubMessage sub = 3; // `sub.subfield` becomes a parameter + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456?revision=2&sub.subfield=foo` | `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: "foo"))` + * + * Note that fields which are mapped to HTTP parameters must have a + * primitive type or a repeated primitive type. Message types are not + * allowed. In the case of a repeated type, the parameter can be + * repeated in the URL, as in `...?param=A¶m=B`. + * + * For HTTP method kinds which allow a request body, the `body` field + * specifies the mapping. Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" message { text: "Hi!" 
})` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * The following HTTP JSON to RPC mapping is enabled: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice of + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC + * mappings: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` + * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: "123456")` + * + * # Rules for HTTP mapping + * + * The rules for mapping HTTP path, query parameters, and body fields + * to the request message are as follows: + * + * 1. 
The `body` field specifies either `*` or a field path, or is + * omitted. If omitted, it indicates there is no HTTP request body. + * 2. Leaf fields (recursive expansion of nested messages in the + * request) can be classified into three types: + * (a) Matched in the URL template. + * (b) Covered by body (if body is `*`, everything except (a) fields; + * else everything under the body field) + * (c) All other fields. + * 3. URL query parameters found in the HTTP request are mapped to (c) fields. + * 4. Any body sent with an HTTP request can contain only (b) fields. + * + * The syntax of the path template is as follows: + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single path segment. The syntax `**` matches zero + * or more path segments, which must be the last part of the path except the + * `Verb`. The syntax `LITERAL` matches literal text in the path. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path, all characters + * except `[-_.~0-9a-zA-Z]` are percent-encoded. Such variables show up in the + * Discovery Document as `{var}`. + * + * If a variable contains one or more path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path, all + * characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. Such variables + * show up in the Discovery Document as `{+var}`. 
+ * + * NOTE: While the single segment variable matches the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 + * Simple String Expansion, the multi segment variable **does not** match + * RFC 6570 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. + * + * NOTE: the field paths in variables and in the `body` must not refer to + * repeated fields or map fields. + */ +export interface HttpRule { + /** + * Selects methods to which this rule applies. + * + * Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + */ + selector: string; + /** Used for listing and getting information about resources. */ + get: + | string + | undefined; + /** Used for updating a resource. */ + put: + | string + | undefined; + /** Used for creating a resource. */ + post: + | string + | undefined; + /** Used for deleting a resource. */ + delete: + | string + | undefined; + /** Used for updating a resource. */ + patch: + | string + | undefined; + /** + * The custom pattern is used for specifying an HTTP method that is not + * included in the `pattern` field, such as HEAD, or "*" to leave the + * HTTP method unspecified for this rule. The wild-card rule is useful + * for services that provide content to Web (HTML) clients. + */ + custom: + | CustomHttpPattern + | undefined; + /** + * The name of the request field whose value is mapped to the HTTP body, or + * `*` for mapping all fields not captured by the path pattern to the HTTP + * body. NOTE: the referred field must not be a repeated field and must be + * present at the top-level of request message type. + */ + body: string; + /** + * Optional. The name of the response field whose value is mapped to the HTTP + * body of response. Other response fields are ignored. When + * not set, the response message will be used as HTTP body of response. 
+ */ + responseBody: string; + /** + * Additional HTTP bindings for the selector. Nested bindings must + * not contain an `additional_bindings` field themselves (that is, + * the nesting may only be one level deep). + */ + additionalBindings: HttpRule[]; +} + +/** A custom pattern is used for defining custom HTTP verb. */ +export interface CustomHttpPattern { + /** The name of this custom HTTP verb. */ + kind: string; + /** The path matched by this custom verb. */ + path: string; +} + +function createBaseHttp(): Http { + return { rules: [], fullyDecodeReservedExpansion: false }; +} + +export const Http = { + encode(message: Http, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rules) { + HttpRule.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.fullyDecodeReservedExpansion === true) { + writer.uint32(16).bool(message.fullyDecodeReservedExpansion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Http { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rules.push(HttpRule.decode(reader, reader.uint32())); + break; + case 2: + message.fullyDecodeReservedExpansion = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Http { + return { + rules: Array.isArray(object?.rules) ? object.rules.map((e: any) => HttpRule.fromJSON(e)) : [], + fullyDecodeReservedExpansion: isSet(object.fullyDecodeReservedExpansion) + ? Boolean(object.fullyDecodeReservedExpansion) + : false, + }; + }, + + toJSON(message: Http): unknown { + const obj: any = {}; + if (message.rules) { + obj.rules = message.rules.map((e) => e ? 
HttpRule.toJSON(e) : undefined); + } else { + obj.rules = []; + } + message.fullyDecodeReservedExpansion !== undefined + && (obj.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion); + return obj; + }, + + fromPartial, I>>(object: I): Http { + const message = createBaseHttp(); + message.rules = object.rules?.map((e) => HttpRule.fromPartial(e)) || []; + message.fullyDecodeReservedExpansion = object.fullyDecodeReservedExpansion ?? false; + return message; + }, +}; + +function createBaseHttpRule(): HttpRule { + return { + selector: "", + get: undefined, + put: undefined, + post: undefined, + delete: undefined, + patch: undefined, + custom: undefined, + body: "", + responseBody: "", + additionalBindings: [], + }; +} + +export const HttpRule = { + encode(message: HttpRule, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.selector !== "") { + writer.uint32(10).string(message.selector); + } + if (message.get !== undefined) { + writer.uint32(18).string(message.get); + } + if (message.put !== undefined) { + writer.uint32(26).string(message.put); + } + if (message.post !== undefined) { + writer.uint32(34).string(message.post); + } + if (message.delete !== undefined) { + writer.uint32(42).string(message.delete); + } + if (message.patch !== undefined) { + writer.uint32(50).string(message.patch); + } + if (message.custom !== undefined) { + CustomHttpPattern.encode(message.custom, writer.uint32(66).fork()).ldelim(); + } + if (message.body !== "") { + writer.uint32(58).string(message.body); + } + if (message.responseBody !== "") { + writer.uint32(98).string(message.responseBody); + } + for (const v of message.additionalBindings) { + HttpRule.encode(v!, writer.uint32(90).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): HttpRule { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseHttpRule(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.selector = reader.string(); + break; + case 2: + message.get = reader.string(); + break; + case 3: + message.put = reader.string(); + break; + case 4: + message.post = reader.string(); + break; + case 5: + message.delete = reader.string(); + break; + case 6: + message.patch = reader.string(); + break; + case 8: + message.custom = CustomHttpPattern.decode(reader, reader.uint32()); + break; + case 7: + message.body = reader.string(); + break; + case 12: + message.responseBody = reader.string(); + break; + case 11: + message.additionalBindings.push(HttpRule.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): HttpRule { + return { + selector: isSet(object.selector) ? String(object.selector) : "", + get: isSet(object.get) ? String(object.get) : undefined, + put: isSet(object.put) ? String(object.put) : undefined, + post: isSet(object.post) ? String(object.post) : undefined, + delete: isSet(object.delete) ? String(object.delete) : undefined, + patch: isSet(object.patch) ? String(object.patch) : undefined, + custom: isSet(object.custom) ? CustomHttpPattern.fromJSON(object.custom) : undefined, + body: isSet(object.body) ? String(object.body) : "", + responseBody: isSet(object.responseBody) ? String(object.responseBody) : "", + additionalBindings: Array.isArray(object?.additionalBindings) + ? 
object.additionalBindings.map((e: any) => HttpRule.fromJSON(e)) + : [], + }; + }, + + toJSON(message: HttpRule): unknown { + const obj: any = {}; + message.selector !== undefined && (obj.selector = message.selector); + message.get !== undefined && (obj.get = message.get); + message.put !== undefined && (obj.put = message.put); + message.post !== undefined && (obj.post = message.post); + message.delete !== undefined && (obj.delete = message.delete); + message.patch !== undefined && (obj.patch = message.patch); + message.custom !== undefined + && (obj.custom = message.custom ? CustomHttpPattern.toJSON(message.custom) : undefined); + message.body !== undefined && (obj.body = message.body); + message.responseBody !== undefined && (obj.responseBody = message.responseBody); + if (message.additionalBindings) { + obj.additionalBindings = message.additionalBindings.map((e) => e ? HttpRule.toJSON(e) : undefined); + } else { + obj.additionalBindings = []; + } + return obj; + }, + + fromPartial, I>>(object: I): HttpRule { + const message = createBaseHttpRule(); + message.selector = object.selector ?? ""; + message.get = object.get ?? undefined; + message.put = object.put ?? undefined; + message.post = object.post ?? undefined; + message.delete = object.delete ?? undefined; + message.patch = object.patch ?? undefined; + message.custom = (object.custom !== undefined && object.custom !== null) + ? CustomHttpPattern.fromPartial(object.custom) + : undefined; + message.body = object.body ?? ""; + message.responseBody = object.responseBody ?? 
""; + message.additionalBindings = object.additionalBindings?.map((e) => HttpRule.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseCustomHttpPattern(): CustomHttpPattern { + return { kind: "", path: "" }; +} + +export const CustomHttpPattern = { + encode(message: CustomHttpPattern, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.kind !== "") { + writer.uint32(10).string(message.kind); + } + if (message.path !== "") { + writer.uint32(18).string(message.path); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CustomHttpPattern { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCustomHttpPattern(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.kind = reader.string(); + break; + case 2: + message.path = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): CustomHttpPattern { + return { kind: isSet(object.kind) ? String(object.kind) : "", path: isSet(object.path) ? String(object.path) : "" }; + }, + + toJSON(message: CustomHttpPattern): unknown { + const obj: any = {}; + message.kind !== undefined && (obj.kind = message.kind); + message.path !== undefined && (obj.path = message.path); + return obj; + }, + + fromPartial, I>>(object: I): CustomHttpPattern { + const message = createBaseCustomHttpPattern(); + message.kind = object.kind ?? ""; + message.path = object.path ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? 
keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.base.tendermint.v1beta1/types/google/protobuf/any.ts b/ts-client/cosmos.base.tendermint.v1beta1/types/google/protobuf/any.ts new file mode 100644 index 000000000..c4ebf38be --- /dev/null +++ b/ts-client/cosmos.base.tendermint.v1beta1/types/google/protobuf/any.ts @@ -0,0 +1,240 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * `Any` contains an arbitrary serialized protocol buffer message along with a + * URL that describes the type of the serialized message. + * + * Protobuf library provides support to pack/unpack Any values in the form + * of utility functions or additional generated methods of the Any type. + * + * Example 1: Pack and unpack a message in C++. + * + * Foo foo = ...; + * Any any; + * any.PackFrom(foo); + * ... + * if (any.UnpackTo(&foo)) { + * ... + * } + * + * Example 2: Pack and unpack a message in Java. + * + * Foo foo = ...; + * Any any = Any.pack(foo); + * ... + * if (any.is(Foo.class)) { + * foo = any.unpack(Foo.class); + * } + * + * Example 3: Pack and unpack a message in Python. + * + * foo = Foo(...) + * any = Any() + * any.Pack(foo) + * ... + * if any.Is(Foo.DESCRIPTOR): + * any.Unpack(foo) + * ... + * + * Example 4: Pack and unpack a message in Go + * + * foo := &pb.Foo{...} + * any, err := anypb.New(foo) + * if err != nil { + * ... + * } + * ... + * foo := &pb.Foo{} + * if err := any.UnmarshalTo(foo); err != nil { + * ... 
+ * } + * + * The pack methods provided by protobuf library will by default use + * 'type.googleapis.com/full.type.name' as the type URL and the unpack + * methods only use the fully qualified type name after the last '/' + * in the type URL, for example "foo.bar.com/x/y.z" will yield type + * name "y.z". + * + * JSON + * ==== + * The JSON representation of an `Any` value uses the regular + * representation of the deserialized, embedded message, with an + * additional field `@type` which contains the type URL. Example: + * + * package google.profile; + * message Person { + * string first_name = 1; + * string last_name = 2; + * } + * + * { + * "@type": "type.googleapis.com/google.profile.Person", + * "firstName": , + * "lastName": + * } + * + * If the embedded message type is well-known and has a custom JSON + * representation, that representation will be embedded adding a field + * `value` which holds the custom JSON in addition to the `@type` + * field. Example (for message [google.protobuf.Duration][]): + * + * { + * "@type": "type.googleapis.com/google.protobuf.Duration", + * "value": "1.212s" + * } + */ +export interface Any { + /** + * A URL/resource name that uniquely identifies the type of the serialized + * protocol buffer message. This string must contain at least + * one "/" character. The last segment of the URL's path must represent + * the fully qualified name of the type (as in + * `path/google.protobuf.Duration`). The name should be in a canonical form + * (e.g., leading "." is not accepted). + * + * In practice, teams usually precompile into the binary all types that they + * expect it to use in the context of Any. However, for URLs which use the + * scheme `http`, `https`, or no scheme, one can optionally set up a type + * server that maps type URLs to message definitions as follows: + * + * * If no scheme is provided, `https` is assumed. 
+ * * An HTTP GET on the URL must yield a [google.protobuf.Type][] + * value in binary format, or produce an error. + * * Applications are allowed to cache lookup results based on the + * URL, or have them precompiled into a binary to avoid any + * lookup. Therefore, binary compatibility needs to be preserved + * on changes to types. (Use versioned type names to manage + * breaking changes.) + * + * Note: this functionality is not currently available in the official + * protobuf release, and it is not used for type URLs beginning with + * type.googleapis.com. + * + * Schemes other than `http`, `https` (or the empty scheme) might be + * used with implementation specific semantics. + */ + typeUrl: string; + /** Must be a valid serialized protocol buffer of the above specified type. */ + value: Uint8Array; +} + +function createBaseAny(): Any { + return { typeUrl: "", value: new Uint8Array() }; +} + +export const Any = { + encode(message: Any, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.typeUrl !== "") { + writer.uint32(10).string(message.typeUrl); + } + if (message.value.length !== 0) { + writer.uint32(18).bytes(message.value); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Any { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAny(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.typeUrl = reader.string(); + break; + case 2: + message.value = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Any { + return { + typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : "", + value: isSet(object.value) ? 
bytesFromBase64(object.value) : new Uint8Array(), + }; + }, + + toJSON(message: Any): unknown { + const obj: any = {}; + message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl); + message.value !== undefined + && (obj.value = base64FromBytes(message.value !== undefined ? message.value : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): Any { + const message = createBaseAny(); + message.typeUrl = object.typeUrl ?? ""; + message.value = object.value ?? new Uint8Array(); + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.base.tendermint.v1beta1/types/google/protobuf/descriptor.ts b/ts-client/cosmos.base.tendermint.v1beta1/types/google/protobuf/descriptor.ts new file mode 100644 index 000000000..8fb7bb24c --- /dev/null +++ b/ts-client/cosmos.base.tendermint.v1beta1/types/google/protobuf/descriptor.ts @@ -0,0 +1,3753 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSet { + file: FileDescriptorProto[]; +} + +/** Describes a complete .proto file. */ +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name: string; + /** e.g. "foo", "foo.bar", etc. */ + package: string; + /** Names of files imported by this file. */ + dependency: string[]; + /** Indexes of the public imported files in the dependency list above. */ + publicDependency: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weakDependency: number[]; + /** All top-level definitions in this file. */ + messageType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + service: ServiceDescriptorProto[]; + extension: FieldDescriptorProto[]; + options: + | FileOptions + | undefined; + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. + */ + sourceCodeInfo: + | SourceCodeInfo + | undefined; + /** + * The syntax of the proto file. + * The supported values are "proto2" and "proto3". 
+ */ + syntax: string; +} + +/** Describes a message type. */ +export interface DescriptorProto { + name: string; + field: FieldDescriptorProto[]; + extension: FieldDescriptorProto[]; + nestedType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + extensionRange: DescriptorProto_ExtensionRange[]; + oneofDecl: OneofDescriptorProto[]; + options: MessageOptions | undefined; + reservedRange: DescriptorProto_ReservedRange[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reservedName: string[]; +} + +export interface DescriptorProto_ExtensionRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; + options: ExtensionRangeOptions | undefined; +} + +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; +} + +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Describes a field within a message. */ +export interface FieldDescriptorProto { + name: string; + number: number; + label: FieldDescriptorProto_Label; + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type: FieldDescriptorProto_Type; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). + */ + typeName: string; + /** + * For extensions, this is the name of the type being extended. 
It is + * resolved in the same manner as type_name. + */ + extendee: string; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + * TODO(kenton): Base-64 encode? + */ + defaultValue: string; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneofIndex: number; + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + jsonName: string; + options: + | FieldOptions + | undefined; + /** + * If true, this is a proto3 "optional". When a proto3 field is optional, it + * tracks presence regardless of field type. + * + * When proto3_optional is true, this field must be belong to a oneof to + * signal to old proto3 clients that presence is tracked for this field. This + * oneof is known as a "synthetic" oneof, and this field must be its sole + * member (each proto3 optional field gets its own synthetic oneof). Synthetic + * oneofs exist in the descriptor only, and do not generate any API. Synthetic + * oneofs must be ordered after all "real" oneofs. + * + * For message fields, proto3_optional doesn't create any semantic change, + * since non-repeated message fields always track presence. However it still + * indicates the semantic detail of whether the user wrote "optional" or not. + * This can be useful for round-tripping the .proto file. For consistency we + * give message fields a synthetic oneof also, even though it is not required + * to track presence. 
This is especially important because the parser can't + * tell if a field is a message or an enum, so it must always create a + * synthetic oneof. + * + * Proto2 optional fields do not set this flag, because they already indicate + * optional with `LABEL_OPTIONAL`. + */ + proto3Optional: boolean; +} + +export enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + TYPE_GROUP = 10, + /** TYPE_MESSAGE - Length-delimited aggregate. */ + TYPE_MESSAGE = 11, + /** TYPE_BYTES - New in version 2. */ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + /** TYPE_SINT64 - Uses ZigZag encoding. 
*/ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + return FieldDescriptorProto_Type.TYPE_SINT64; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Type.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case 
FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + case FieldDescriptorProto_Type.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Label.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string { + switch (object) { + case 
FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + case FieldDescriptorProto_Label.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** Describes a oneof. */ +export interface OneofDescriptorProto { + name: string; + options: OneofOptions | undefined; +} + +/** Describes an enum type. */ +export interface EnumDescriptorProto { + name: string; + value: EnumValueDescriptorProto[]; + options: + | EnumOptions + | undefined; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reservedRange: EnumDescriptorProto_EnumReservedRange[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reservedName: string[]; +} + +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start: number; + /** Inclusive. */ + end: number; +} + +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProto { + name: string; + number: number; + options: EnumValueOptions | undefined; +} + +/** Describes a service. */ +export interface ServiceDescriptorProto { + name: string; + method: MethodDescriptorProto[]; + options: ServiceOptions | undefined; +} + +/** Describes a method of a service. */ +export interface MethodDescriptorProto { + name: string; + /** + * Input and output type names. 
These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + inputType: string; + outputType: string; + options: + | MethodOptions + | undefined; + /** Identifies if client streams multiple client messages */ + clientStreaming: boolean; + /** Identifies if server streams multiple server messages */ + serverStreaming: boolean; +} + +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage: string; + /** + * Controls the name of the wrapper Java class generated for the .proto file. + * That class will always contain the .proto file's getDescriptor() method as + * well as any top-level extensions defined in the .proto file. + * If java_multiple_files is disabled, then all the other classes from the + * .proto file will be nested inside the single wrapper outer class. + */ + javaOuterClassname: string; + /** + * If enabled, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the wrapper class + * named by java_outer_classname. However, the wrapper class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + javaMultipleFiles: boolean; + /** + * This option does nothing. + * + * @deprecated + */ + javaGenerateEqualsAndHash: boolean; + /** + * If set true, then the Java2 code generator will generate code that + * throws an exception whenever an attempt is made to assign a non-UTF-8 + * byte sequence to a string field. + * Message reflection will do the same. + * However, an extension field still accepts non-UTF-8 byte sequences. 
+ * This option has no effect on when used with the lite runtime. + */ + javaStringCheckUtf8: boolean; + optimizeFor: FileOptions_OptimizeMode; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + goPackage: string; + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + ccGenericServices: boolean; + javaGenericServices: boolean; + pyGenericServices: boolean; + phpGenericServices: boolean; + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated: boolean; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + ccEnableArenas: boolean; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. + */ + objcClassPrefix: string; + /** Namespace for generated classes; defaults to the package. 
*/ + csharpNamespace: string; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swiftPrefix: string; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + phpClassPrefix: string; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. When this option is empty, the package name will be used for + * determining the namespace. + */ + phpNamespace: string; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + phpMetadataNamespace: string; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + rubyPackage: string; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Generated classes can be optimized for speed or code size. */ +export enum FileOptions_OptimizeMode { + /** SPEED - Generate complete code for parsing, serialization, */ + SPEED = 1, + /** CODE_SIZE - etc. */ + CODE_SIZE = 2, + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. 
*/ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1, +} + +export function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + case -1: + case "UNRECOGNIZED": + default: + return FileOptions_OptimizeMode.UNRECOGNIZED; + } +} + +export function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + case FileOptions_OptimizeMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + messageSetWireFormat: boolean; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". 
+ */ + noStandardDescriptorAccessor: boolean; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated: boolean; + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + mapEntry: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface FieldOptions { + /** + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is not yet implemented in the open source + * release -- sorry, we'll try to include it in a future version! + */ + ctype: FieldOptions_CType; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. 
+ */ + packed: boolean; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. + * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype: FieldOptions_JSType; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * Note that implementations may choose not to check required fields within + * a lazy sub-message. That is, calling IsInitialized() on the outer message + * may return true even if the inner message has missing required fields. 
+ * This is necessary because otherwise the inner message would have to be + * parsed in order to perform the check, defeating the purpose of lazy + * parsing. An implementation which chooses not to check required fields + * must be consistent about it. That is, for any particular sub-message, the + * implementation must either *always* check its required fields, or *never* + * check its required fields, regardless of whether or not the message has + * been parsed. + */ + lazy: boolean; + /** + * Is this field deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated: boolean; + /** For Google-internal migration only. Do not use. */ + weak: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export enum FieldOptions_CType { + /** STRING - Default mode. */ + STRING = 0, + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_CType.UNRECOGNIZED; + } +} + +export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + case FieldOptions_CType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. */ + JS_NORMAL = 0, + /** JS_STRING - Use JavaScript strings. 
*/ + JS_STRING = 1, + /** JS_NUMBER - Use JavaScript numbers. */ + JS_NUMBER = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_JSType.UNRECOGNIZED; + } +} + +export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + case FieldOptions_JSType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface OneofOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias: boolean; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumValueOptions { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. 
*/ + uninterpretedOption: UninterpretedOption[]; +} + +export interface ServiceOptions { + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated: boolean; + idempotencyLevel: MethodOptions_IdempotencyLevel; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
+ */ +export enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1, +} + +export function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + case -1: + case "UNRECOGNIZED": + default: + return MethodOptions_IdempotencyLevel.UNRECOGNIZED; + } +} + +export function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + case MethodOptions_IdempotencyLevel.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. 
+ */ + identifierValue: string; + positiveIntValue: number; + negativeIntValue: number; + doubleValue: number; + stringValue: Uint8Array; + aggregateValue: string; +} + +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePart { + namePart: string; + isExtension: boolean; +} + +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. 
This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[]; +} + +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition. For + * example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). 
+ */ + path: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. + * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to qux. + * // + * // Another line attached to qux. + * optional double qux = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to qux or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. * / + * /* Block comment attached to + * * grault. 
* / + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leadingComments: string; + trailingComments: string; + leadingDetachedComments: string[]; +} + +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. + */ + annotation: GeneratedCodeInfo_Annotation[]; +} + +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[]; + /** Identifies the filesystem path to the original source .proto. */ + sourceFile: string; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin: number; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified offset. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end: number; +} + +function createBaseFileDescriptorSet(): FileDescriptorSet { + return { file: [] }; +} + +export const FileDescriptorSet = { + encode(message: FileDescriptorSet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.file) { + FileDescriptorProto.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorSet { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.file.push(FileDescriptorProto.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorSet { + return { file: Array.isArray(object?.file) ? object.file.map((e: any) => FileDescriptorProto.fromJSON(e)) : [] }; + }, + + toJSON(message: FileDescriptorSet): unknown { + const obj: any = {}; + if (message.file) { + obj.file = message.file.map((e) => e ? FileDescriptorProto.toJSON(e) : undefined); + } else { + obj.file = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorSet { + const message = createBaseFileDescriptorSet(); + message.file = object.file?.map((e) => FileDescriptorProto.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFileDescriptorProto(): FileDescriptorProto { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + }; +} + +export const FileDescriptorProto = { + encode(message: FileDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.package !== "") { + writer.uint32(18).string(message.package); + } + for (const v of message.dependency) { + writer.uint32(26).string(v!); + } + writer.uint32(82).fork(); + for (const v of message.publicDependency) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(90).fork(); + for (const v of message.weakDependency) { + writer.int32(v); + } + writer.ldelim(); + for (const v of message.messageType) { + DescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of 
message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const v of message.service) { + ServiceDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(58).fork()).ldelim(); + } + if (message.options !== undefined) { + FileOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.sourceCodeInfo !== undefined) { + SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(74).fork()).ldelim(); + } + if (message.syntax !== "") { + writer.uint32(98).string(message.syntax); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFileDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.package = reader.string(); + break; + case 3: + message.dependency.push(reader.string()); + break; + case 10: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.publicDependency.push(reader.int32()); + } + } else { + message.publicDependency.push(reader.int32()); + } + break; + case 11: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.weakDependency.push(reader.int32()); + } + } else { + message.weakDependency.push(reader.int32()); + } + break; + case 4: + message.messageType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.service.push(ServiceDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + 
message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 8: + message.options = FileOptions.decode(reader, reader.uint32()); + break; + case 9: + message.sourceCodeInfo = SourceCodeInfo.decode(reader, reader.uint32()); + break; + case 12: + message.syntax = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + package: isSet(object.package) ? String(object.package) : "", + dependency: Array.isArray(object?.dependency) ? object.dependency.map((e: any) => String(e)) : [], + publicDependency: Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e: any) => Number(e)) + : [], + weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e: any) => Number(e)) : [], + messageType: Array.isArray(object?.messageType) + ? object.messageType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + service: Array.isArray(object?.service) ? object.service.map((e: any) => ServiceDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) ? SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, + syntax: isSet(object.syntax) ? 
String(object.syntax) : "", + }; + }, + + toJSON(message: FileDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.package !== undefined && (obj.package = message.package); + if (message.dependency) { + obj.dependency = message.dependency.map((e) => e); + } else { + obj.dependency = []; + } + if (message.publicDependency) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } else { + obj.publicDependency = []; + } + if (message.weakDependency) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } else { + obj.weakDependency = []; + } + if (message.messageType) { + obj.messageType = message.messageType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.messageType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.service) { + obj.service = message.service.map((e) => e ? ServiceDescriptorProto.toJSON(e) : undefined); + } else { + obj.service = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + message.options !== undefined && (obj.options = message.options ? FileOptions.toJSON(message.options) : undefined); + message.sourceCodeInfo !== undefined + && (obj.sourceCodeInfo = message.sourceCodeInfo ? SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined); + message.syntax !== undefined && (obj.syntax = message.syntax); + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorProto { + const message = createBaseFileDescriptorProto(); + message.name = object.name ?? ""; + message.package = object.package ?? 
""; + message.dependency = object.dependency?.map((e) => e) || []; + message.publicDependency = object.publicDependency?.map((e) => e) || []; + message.weakDependency = object.weakDependency?.map((e) => e) || []; + message.messageType = object.messageType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.service = object.service?.map((e) => ServiceDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? FileOptions.fromPartial(object.options) + : undefined; + message.sourceCodeInfo = (object.sourceCodeInfo !== undefined && object.sourceCodeInfo !== null) + ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) + : undefined; + message.syntax = object.syntax ?? ""; + return message; + }, +}; + +function createBaseDescriptorProto(): DescriptorProto { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + }; +} + +export const DescriptorProto = { + encode(message: DescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.field) { + FieldDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.nestedType) { + DescriptorProto.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.extensionRange) { + DescriptorProto_ExtensionRange.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const 
v of message.oneofDecl) { + OneofDescriptorProto.encode(v!, writer.uint32(66).fork()).ldelim(); + } + if (message.options !== undefined) { + MessageOptions.encode(message.options, writer.uint32(58).fork()).ldelim(); + } + for (const v of message.reservedRange) { + DescriptorProto_ReservedRange.encode(v!, writer.uint32(74).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(82).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.field.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.nestedType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 4: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.extensionRange.push(DescriptorProto_ExtensionRange.decode(reader, reader.uint32())); + break; + case 8: + message.oneofDecl.push(OneofDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + message.options = MessageOptions.decode(reader, reader.uint32()); + break; + case 9: + message.reservedRange.push(DescriptorProto_ReservedRange.decode(reader, reader.uint32())); + break; + case 10: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + field: Array.isArray(object?.field) ? 
object.field.map((e: any) => FieldDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: Array.isArray(object?.nestedType) + ? object.nestedType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + extensionRange: Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e: any) => DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e: any) => OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? MessageOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: DescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.field) { + obj.field = message.field.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.field = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + if (message.nestedType) { + obj.nestedType = message.nestedType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.nestedType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.extensionRange) { + obj.extensionRange = message.extensionRange.map((e) => e ? 
DescriptorProto_ExtensionRange.toJSON(e) : undefined); + } else { + obj.extensionRange = []; + } + if (message.oneofDecl) { + obj.oneofDecl = message.oneofDecl.map((e) => e ? OneofDescriptorProto.toJSON(e) : undefined); + } else { + obj.oneofDecl = []; + } + message.options !== undefined + && (obj.options = message.options ? MessageOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => e ? DescriptorProto_ReservedRange.toJSON(e) : undefined); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): DescriptorProto { + const message = createBaseDescriptorProto(); + message.name = object.name ?? ""; + message.field = object.field?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.nestedType = object.nestedType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.extensionRange = object.extensionRange?.map((e) => DescriptorProto_ExtensionRange.fromPartial(e)) || []; + message.oneofDecl = object.oneofDecl?.map((e) => OneofDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
MessageOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => DescriptorProto_ReservedRange.fromPartial(e)) || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseDescriptorProto_ExtensionRange(): DescriptorProto_ExtensionRange { + return { start: 0, end: 0, options: undefined }; +} + +export const DescriptorProto_ExtensionRange = { + encode(message: DescriptorProto_ExtensionRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + if (message.options !== undefined) { + ExtensionRangeOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ExtensionRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ExtensionRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + case 3: + message.options = ExtensionRangeOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ExtensionRange { + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + options: isSet(object.options) ? 
ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: DescriptorProto_ExtensionRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + message.options !== undefined + && (obj.options = message.options ? ExtensionRangeOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ExtensionRange { + const message = createBaseDescriptorProto_ExtensionRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? ExtensionRangeOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseDescriptorProto_ReservedRange(): DescriptorProto_ReservedRange { + return { start: 0, end: 0 }; +} + +export const DescriptorProto_ReservedRange = { + encode(message: DescriptorProto_ReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? 
Number(object.end) : 0 }; + }, + + toJSON(message: DescriptorProto_ReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ReservedRange { + const message = createBaseDescriptorProto_ReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + return message; + }, +}; + +function createBaseExtensionRangeOptions(): ExtensionRangeOptions { + return { uninterpretedOption: [] }; +} + +export const ExtensionRangeOptions = { + encode(message: ExtensionRangeOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ExtensionRangeOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ExtensionRangeOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} + +export const FieldDescriptorProto = { + encode(message: FieldDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(24).int32(message.number); + } + if (message.label !== 1) { + writer.uint32(32).int32(message.label); + } + if (message.type !== 1) { + writer.uint32(40).int32(message.type); + } + if (message.typeName !== "") { + writer.uint32(50).string(message.typeName); + } + if (message.extendee !== "") { + writer.uint32(18).string(message.extendee); + } + if (message.defaultValue !== "") { + writer.uint32(58).string(message.defaultValue); + } + if (message.oneofIndex !== 0) { + writer.uint32(72).int32(message.oneofIndex); + } + if (message.jsonName !== "") { + writer.uint32(82).string(message.jsonName); + } + if (message.options !== undefined) { + FieldOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.proto3Optional === true) { + writer.uint32(136).bool(message.proto3Optional); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 3: + message.number = reader.int32(); + break; + case 4: + message.label = reader.int32() as any; + break; + case 5: + message.type = reader.int32() as any; + break; + case 6: + message.typeName = reader.string(); + break; + case 2: + message.extendee = reader.string(); + break; + case 7: + message.defaultValue = reader.string(); + break; + case 9: + message.oneofIndex = reader.int32(); + break; + case 10: + message.jsonName = reader.string(); + break; + case 8: + message.options = FieldOptions.decode(reader, reader.uint32()); + break; + case 17: + message.proto3Optional = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) ? String(object.typeName) : "", + extendee: isSet(object.extendee) ? String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "", + oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? String(object.jsonName) : "", + options: isSet(object.options) ? FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) ? 
Boolean(object.proto3Optional) : false, + }; + }, + + toJSON(message: FieldDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label)); + message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type)); + message.typeName !== undefined && (obj.typeName = message.typeName); + message.extendee !== undefined && (obj.extendee = message.extendee); + message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue); + message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex)); + message.jsonName !== undefined && (obj.jsonName = message.jsonName); + message.options !== undefined && (obj.options = message.options ? FieldOptions.toJSON(message.options) : undefined); + message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional); + return obj; + }, + + fromPartial, I>>(object: I): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.label = object.label ?? 1; + message.type = object.type ?? 1; + message.typeName = object.typeName ?? ""; + message.extendee = object.extendee ?? ""; + message.defaultValue = object.defaultValue ?? ""; + message.oneofIndex = object.oneofIndex ?? 0; + message.jsonName = object.jsonName ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? FieldOptions.fromPartial(object.options) + : undefined; + message.proto3Optional = object.proto3Optional ?? 
false; + return message; + }, +}; + +function createBaseOneofDescriptorProto(): OneofDescriptorProto { + return { name: "", options: undefined }; +} + +export const OneofDescriptorProto = { + encode(message: OneofDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.options !== undefined) { + OneofOptions.encode(message.options, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.options = OneofOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + options: isSet(object.options) ? OneofOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: OneofDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.options !== undefined && (obj.options = message.options ? OneofOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto(); + message.name = object.name ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? 
OneofOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseEnumDescriptorProto(): EnumDescriptorProto { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] }; +} + +export const EnumDescriptorProto = { + encode(message: EnumDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.value) { + EnumValueDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + EnumOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.reservedRange) { + EnumDescriptorProto_EnumReservedRange.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(42).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.value.push(EnumValueDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = EnumOptions.decode(reader, reader.uint32()); + break; + case 4: + message.reservedRange.push(EnumDescriptorProto_EnumReservedRange.decode(reader, reader.uint32())); + break; + case 5: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + value: Array.isArray(object?.value) ? 
object.value.map((e: any) => EnumValueDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? EnumOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: EnumDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.value) { + obj.value = message.value.map((e) => e ? EnumValueDescriptorProto.toJSON(e) : undefined); + } else { + obj.value = []; + } + message.options !== undefined && (obj.options = message.options ? EnumOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => + e ? EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined + ); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto(); + message.name = object.name ?? ""; + message.value = object.value?.map((e) => EnumValueDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
EnumOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => EnumDescriptorProto_EnumReservedRange.fromPartial(e)) + || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseEnumDescriptorProto_EnumReservedRange(): EnumDescriptorProto_EnumReservedRange { + return { start: 0, end: 0 }; +} + +export const EnumDescriptorProto_EnumReservedRange = { + encode(message: EnumDescriptorProto_EnumReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto_EnumReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto_EnumReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + + toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +function createBaseEnumValueDescriptorProto(): EnumValueDescriptorProto { + return { name: "", number: 0, options: undefined }; +} + +export const EnumValueDescriptorProto = { + encode(message: EnumValueDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(16).int32(message.number); + } + if (message.options !== undefined) { + EnumValueOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.number = reader.int32(); + break; + case 3: + message.options = EnumValueOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + options: isSet(object.options) ? EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: EnumValueDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.options !== undefined + && (obj.options = message.options ? 
EnumValueOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? EnumValueOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseServiceDescriptorProto(): ServiceDescriptorProto { + return { name: "", method: [], options: undefined }; +} + +export const ServiceDescriptorProto = { + encode(message: ServiceDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.method) { + MethodDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + ServiceOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.method.push(MethodDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = ServiceOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + method: Array.isArray(object?.method) ? object.method.map((e: any) => MethodDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? 
ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: ServiceDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.method) { + obj.method = message.method.map((e) => e ? MethodDescriptorProto.toJSON(e) : undefined); + } else { + obj.method = []; + } + message.options !== undefined + && (obj.options = message.options ? ServiceOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto(); + message.name = object.name ?? ""; + message.method = object.method?.map((e) => MethodDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? ServiceOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseMethodDescriptorProto(): MethodDescriptorProto { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} + +export const MethodDescriptorProto = { + encode(message: MethodDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.inputType !== "") { + writer.uint32(18).string(message.inputType); + } + if (message.outputType !== "") { + writer.uint32(26).string(message.outputType); + } + if (message.options !== undefined) { + MethodOptions.encode(message.options, writer.uint32(34).fork()).ldelim(); + } + if (message.clientStreaming === true) { + writer.uint32(40).bool(message.clientStreaming); + } + if (message.serverStreaming === true) { + writer.uint32(48).bool(message.serverStreaming); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodDescriptorProto { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.inputType = reader.string(); + break; + case 3: + message.outputType = reader.string(); + break; + case 4: + message.options = MethodOptions.decode(reader, reader.uint32()); + break; + case 5: + message.clientStreaming = reader.bool(); + break; + case 6: + message.serverStreaming = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + inputType: isSet(object.inputType) ? String(object.inputType) : "", + outputType: isSet(object.outputType) ? String(object.outputType) : "", + options: isSet(object.options) ? MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false, + serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false, + }; + }, + + toJSON(message: MethodDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.inputType !== undefined && (obj.inputType = message.inputType); + message.outputType !== undefined && (obj.outputType = message.outputType); + message.options !== undefined + && (obj.options = message.options ? MethodOptions.toJSON(message.options) : undefined); + message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming); + message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming); + return obj; + }, + + fromPartial, I>>(object: I): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto(); + message.name = object.name ?? 
""; + message.inputType = object.inputType ?? ""; + message.outputType = object.outputType ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? MethodOptions.fromPartial(object.options) + : undefined; + message.clientStreaming = object.clientStreaming ?? false; + message.serverStreaming = object.serverStreaming ?? false; + return message; + }, +}; + +function createBaseFileOptions(): FileOptions { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + uninterpretedOption: [], + }; +} + +export const FileOptions = { + encode(message: FileOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.javaPackage !== "") { + writer.uint32(10).string(message.javaPackage); + } + if (message.javaOuterClassname !== "") { + writer.uint32(66).string(message.javaOuterClassname); + } + if (message.javaMultipleFiles === true) { + writer.uint32(80).bool(message.javaMultipleFiles); + } + if (message.javaGenerateEqualsAndHash === true) { + writer.uint32(160).bool(message.javaGenerateEqualsAndHash); + } + if (message.javaStringCheckUtf8 === true) { + writer.uint32(216).bool(message.javaStringCheckUtf8); + } + if (message.optimizeFor !== 1) { + writer.uint32(72).int32(message.optimizeFor); + } + if (message.goPackage !== "") { + writer.uint32(90).string(message.goPackage); + } + if (message.ccGenericServices === true) { + writer.uint32(128).bool(message.ccGenericServices); + } + if (message.javaGenericServices === true) { + writer.uint32(136).bool(message.javaGenericServices); + } + if 
(message.pyGenericServices === true) { + writer.uint32(144).bool(message.pyGenericServices); + } + if (message.phpGenericServices === true) { + writer.uint32(336).bool(message.phpGenericServices); + } + if (message.deprecated === true) { + writer.uint32(184).bool(message.deprecated); + } + if (message.ccEnableArenas === true) { + writer.uint32(248).bool(message.ccEnableArenas); + } + if (message.objcClassPrefix !== "") { + writer.uint32(290).string(message.objcClassPrefix); + } + if (message.csharpNamespace !== "") { + writer.uint32(298).string(message.csharpNamespace); + } + if (message.swiftPrefix !== "") { + writer.uint32(314).string(message.swiftPrefix); + } + if (message.phpClassPrefix !== "") { + writer.uint32(322).string(message.phpClassPrefix); + } + if (message.phpNamespace !== "") { + writer.uint32(330).string(message.phpNamespace); + } + if (message.phpMetadataNamespace !== "") { + writer.uint32(354).string(message.phpMetadataNamespace); + } + if (message.rubyPackage !== "") { + writer.uint32(362).string(message.rubyPackage); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.javaPackage = reader.string(); + break; + case 8: + message.javaOuterClassname = reader.string(); + break; + case 10: + message.javaMultipleFiles = reader.bool(); + break; + case 20: + message.javaGenerateEqualsAndHash = reader.bool(); + break; + case 27: + message.javaStringCheckUtf8 = reader.bool(); + break; + case 9: + message.optimizeFor = reader.int32() as any; + break; + case 11: + message.goPackage = reader.string(); + break; + case 16: + message.ccGenericServices = reader.bool(); + break; + case 17: + message.javaGenericServices = reader.bool(); + break; + case 18: + message.pyGenericServices = reader.bool(); + break; + case 42: + message.phpGenericServices = reader.bool(); + break; + case 23: + message.deprecated = reader.bool(); + break; + case 31: + message.ccEnableArenas = reader.bool(); + break; + case 36: + message.objcClassPrefix = reader.string(); + break; + case 37: + message.csharpNamespace = reader.string(); + break; + case 39: + message.swiftPrefix = reader.string(); + break; + case 40: + message.phpClassPrefix = reader.string(); + break; + case 41: + message.phpNamespace = reader.string(); + break; + case 44: + message.phpMetadataNamespace = reader.string(); + break; + case 45: + message.rubyPackage = reader.string(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileOptions { + return { + javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "", + javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) ? 
Boolean(object.javaMultipleFiles) : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false, + optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, + goPackage: isSet(object.goPackage) ? String(object.goPackage) : "", + ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false, + javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false, + pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false, + phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false, + objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "", + csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "", + swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "", + phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "", + phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "", + rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "", + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FileOptions): unknown { + const obj: any = {}; + message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage); + message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname); + message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles); + message.javaGenerateEqualsAndHash !== undefined + && (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash); + message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8); + message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor)); + message.goPackage !== undefined && (obj.goPackage = message.goPackage); + message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices); + message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices); + message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices); + message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas); + message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix); + message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace); + message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix); + message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix); + message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace); + message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace); + message.rubyPackage !== undefined 
&& (obj.rubyPackage = message.rubyPackage); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileOptions { + const message = createBaseFileOptions(); + message.javaPackage = object.javaPackage ?? ""; + message.javaOuterClassname = object.javaOuterClassname ?? ""; + message.javaMultipleFiles = object.javaMultipleFiles ?? false; + message.javaGenerateEqualsAndHash = object.javaGenerateEqualsAndHash ?? false; + message.javaStringCheckUtf8 = object.javaStringCheckUtf8 ?? false; + message.optimizeFor = object.optimizeFor ?? 1; + message.goPackage = object.goPackage ?? ""; + message.ccGenericServices = object.ccGenericServices ?? false; + message.javaGenericServices = object.javaGenericServices ?? false; + message.pyGenericServices = object.pyGenericServices ?? false; + message.phpGenericServices = object.phpGenericServices ?? false; + message.deprecated = object.deprecated ?? false; + message.ccEnableArenas = object.ccEnableArenas ?? false; + message.objcClassPrefix = object.objcClassPrefix ?? ""; + message.csharpNamespace = object.csharpNamespace ?? ""; + message.swiftPrefix = object.swiftPrefix ?? ""; + message.phpClassPrefix = object.phpClassPrefix ?? ""; + message.phpNamespace = object.phpNamespace ?? ""; + message.phpMetadataNamespace = object.phpMetadataNamespace ?? ""; + message.rubyPackage = object.rubyPackage ?? 
""; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMessageOptions(): MessageOptions { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + uninterpretedOption: [], + }; +} + +export const MessageOptions = { + encode(message: MessageOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.messageSetWireFormat === true) { + writer.uint32(8).bool(message.messageSetWireFormat); + } + if (message.noStandardDescriptorAccessor === true) { + writer.uint32(16).bool(message.noStandardDescriptorAccessor); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.mapEntry === true) { + writer.uint32(56).bool(message.mapEntry); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MessageOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMessageOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.messageSetWireFormat = reader.bool(); + break; + case 2: + message.noStandardDescriptorAccessor = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 7: + message.mapEntry = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MessageOptions { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) ? 
Boolean(object.messageSetWireFormat) : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MessageOptions): unknown { + const obj: any = {}; + message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat); + message.noStandardDescriptorAccessor !== undefined + && (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MessageOptions { + const message = createBaseMessageOptions(); + message.messageSetWireFormat = object.messageSetWireFormat ?? false; + message.noStandardDescriptorAccessor = object.noStandardDescriptorAccessor ?? false; + message.deprecated = object.deprecated ?? false; + message.mapEntry = object.mapEntry ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldOptions(): FieldOptions { + return { ctype: 0, packed: false, jstype: 0, lazy: false, deprecated: false, weak: false, uninterpretedOption: [] }; +} + +export const FieldOptions = { + encode(message: FieldOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ctype !== 0) { + writer.uint32(8).int32(message.ctype); + } + if (message.packed === true) { + writer.uint32(16).bool(message.packed); + } + if (message.jstype !== 0) { + writer.uint32(48).int32(message.jstype); + } + if (message.lazy === true) { + writer.uint32(40).bool(message.lazy); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.weak === true) { + writer.uint32(80).bool(message.weak); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.ctype = reader.int32() as any; + break; + case 2: + message.packed = reader.bool(); + break; + case 6: + message.jstype = reader.int32() as any; + break; + case 5: + message.lazy = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 10: + message.weak = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldOptions { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? Boolean(object.lazy) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? Boolean(object.weak) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FieldOptions): unknown { + const obj: any = {}; + message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype)); + message.packed !== undefined && (obj.packed = message.packed); + message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype)); + message.lazy !== undefined && (obj.lazy = message.lazy); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.weak !== undefined && (obj.weak = message.weak); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FieldOptions { + const message = createBaseFieldOptions(); + message.ctype = object.ctype ?? 0; + message.packed = object.packed ?? false; + message.jstype = object.jstype ?? 0; + message.lazy = object.lazy ?? false; + message.deprecated = object.deprecated ?? false; + message.weak = object.weak ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseOneofOptions(): OneofOptions { + return { uninterpretedOption: [] }; +} + +export const OneofOptions = { + encode(message: OneofOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: OneofOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): OneofOptions { + const message = createBaseOneofOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumOptions(): EnumOptions { + return { allowAlias: false, deprecated: false, uninterpretedOption: [] }; +} + +export const EnumOptions = { + encode(message: EnumOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.allowAlias === true) { + writer.uint32(16).bool(message.allowAlias); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.allowAlias = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumOptions { + return { + allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumOptions): unknown { + const obj: any = {}; + message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumOptions { + const message = createBaseEnumOptions(); + message.allowAlias = object.allowAlias ?? false; + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumValueOptions(): EnumValueOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const EnumValueOptions = { + encode(message: EnumValueOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(8).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumValueOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumValueOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumValueOptions { + const message = createBaseEnumValueOptions(); + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseServiceOptions(): ServiceOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const ServiceOptions = { + encode(message: ServiceOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceOptions { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ServiceOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ServiceOptions { + const message = createBaseServiceOptions(); + message.deprecated = object.deprecated ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMethodOptions(): MethodOptions { + return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] }; +} + +export const MethodOptions = { + encode(message: MethodOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + if (message.idempotencyLevel !== 0) { + writer.uint32(272).int32(message.idempotencyLevel); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 34: + message.idempotencyLevel = reader.int32() as any; + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : 0, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MethodOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.idempotencyLevel !== undefined + && (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel)); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MethodOptions { + const message = createBaseMethodOptions(); + message.deprecated = object.deprecated ?? false; + message.idempotencyLevel = object.idempotencyLevel ?? 0; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseUninterpretedOption(): UninterpretedOption { + return { + name: [], + identifierValue: "", + positiveIntValue: 0, + negativeIntValue: 0, + doubleValue: 0, + stringValue: new Uint8Array(), + aggregateValue: "", + }; +} + +export const UninterpretedOption = { + encode(message: UninterpretedOption, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.name) { + UninterpretedOption_NamePart.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.identifierValue !== "") { + writer.uint32(26).string(message.identifierValue); + } + if (message.positiveIntValue !== 0) { + writer.uint32(32).uint64(message.positiveIntValue); + } + if (message.negativeIntValue !== 0) { + writer.uint32(40).int64(message.negativeIntValue); + } + if (message.doubleValue !== 0) { + writer.uint32(49).double(message.doubleValue); + } + if (message.stringValue.length !== 0) { + writer.uint32(58).bytes(message.stringValue); + } + if (message.aggregateValue !== "") { + writer.uint32(66).string(message.aggregateValue); + } + 
return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.name.push(UninterpretedOption_NamePart.decode(reader, reader.uint32())); + break; + case 3: + message.identifierValue = reader.string(); + break; + case 4: + message.positiveIntValue = longToNumber(reader.uint64() as Long); + break; + case 5: + message.negativeIntValue = longToNumber(reader.int64() as Long); + break; + case 6: + message.doubleValue = reader.double(); + break; + case 7: + message.stringValue = reader.bytes(); + break; + case 8: + message.aggregateValue = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption { + return { + name: Array.isArray(object?.name) ? object.name.map((e: any) => UninterpretedOption_NamePart.fromJSON(e)) : [], + identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "", + positiveIntValue: isSet(object.positiveIntValue) ? Number(object.positiveIntValue) : 0, + negativeIntValue: isSet(object.negativeIntValue) ? Number(object.negativeIntValue) : 0, + doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) ? bytesFromBase64(object.stringValue) : new Uint8Array(), + aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "", + }; + }, + + toJSON(message: UninterpretedOption): unknown { + const obj: any = {}; + if (message.name) { + obj.name = message.name.map((e) => e ? 
UninterpretedOption_NamePart.toJSON(e) : undefined); + } else { + obj.name = []; + } + message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue); + message.positiveIntValue !== undefined && (obj.positiveIntValue = Math.round(message.positiveIntValue)); + message.negativeIntValue !== undefined && (obj.negativeIntValue = Math.round(message.negativeIntValue)); + message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue); + message.stringValue !== undefined + && (obj.stringValue = base64FromBytes( + message.stringValue !== undefined ? message.stringValue : new Uint8Array(), + )); + message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption { + const message = createBaseUninterpretedOption(); + message.name = object.name?.map((e) => UninterpretedOption_NamePart.fromPartial(e)) || []; + message.identifierValue = object.identifierValue ?? ""; + message.positiveIntValue = object.positiveIntValue ?? 0; + message.negativeIntValue = object.negativeIntValue ?? 0; + message.doubleValue = object.doubleValue ?? 0; + message.stringValue = object.stringValue ?? new Uint8Array(); + message.aggregateValue = object.aggregateValue ?? ""; + return message; + }, +}; + +function createBaseUninterpretedOption_NamePart(): UninterpretedOption_NamePart { + return { namePart: "", isExtension: false }; +} + +export const UninterpretedOption_NamePart = { + encode(message: UninterpretedOption_NamePart, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.namePart !== "") { + writer.uint32(10).string(message.namePart); + } + if (message.isExtension === true) { + writer.uint32(16).bool(message.isExtension); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption_NamePart { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseUninterpretedOption_NamePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.namePart = reader.string(); + break; + case 2: + message.isExtension = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption_NamePart { + return { + namePart: isSet(object.namePart) ? String(object.namePart) : "", + isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false, + }; + }, + + toJSON(message: UninterpretedOption_NamePart): unknown { + const obj: any = {}; + message.namePart !== undefined && (obj.namePart = message.namePart); + message.isExtension !== undefined && (obj.isExtension = message.isExtension); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart(); + message.namePart = object.namePart ?? ""; + message.isExtension = object.isExtension ?? false; + return message; + }, +}; + +function createBaseSourceCodeInfo(): SourceCodeInfo { + return { location: [] }; +} + +export const SourceCodeInfo = { + encode(message: SourceCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.location) { + SourceCodeInfo_Location.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.location.push(SourceCodeInfo_Location.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo { + return { + location: Array.isArray(object?.location) + ? object.location.map((e: any) => SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo): unknown { + const obj: any = {}; + if (message.location) { + obj.location = message.location.map((e) => e ? SourceCodeInfo_Location.toJSON(e) : undefined); + } else { + obj.location = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo { + const message = createBaseSourceCodeInfo(); + message.location = object.location?.map((e) => SourceCodeInfo_Location.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSourceCodeInfo_Location(): SourceCodeInfo_Location { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} + +export const SourceCodeInfo_Location = { + encode(message: SourceCodeInfo_Location, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(18).fork(); + for (const v of message.span) { + writer.int32(v); + } + writer.ldelim(); + if (message.leadingComments !== "") { + writer.uint32(26).string(message.leadingComments); + } + if (message.trailingComments !== "") { + writer.uint32(34).string(message.trailingComments); + } + for (const v of message.leadingDetachedComments) { + writer.uint32(50).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo_Location { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo_Location(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.span.push(reader.int32()); + } + } else { + message.span.push(reader.int32()); + } + break; + case 3: + message.leadingComments = reader.string(); + break; + case 4: + message.trailingComments = reader.string(); + break; + case 6: + message.leadingDetachedComments.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo_Location { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + span: Array.isArray(object?.span) ? object.span.map((e: any) => Number(e)) : [], + leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "", + trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "", + leadingDetachedComments: Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e: any) => String(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo_Location): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + if (message.span) { + obj.span = message.span.map((e) => Math.round(e)); + } else { + obj.span = []; + } + message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments); + message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments); + if (message.leadingDetachedComments) { + obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e); + } else { + obj.leadingDetachedComments = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location(); + message.path = object.path?.map((e) => e) || []; + message.span = object.span?.map((e) => e) || []; + message.leadingComments = object.leadingComments ?? ""; + message.trailingComments = object.trailingComments ?? ""; + message.leadingDetachedComments = object.leadingDetachedComments?.map((e) => e) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo(): GeneratedCodeInfo { + return { annotation: [] }; +} + +export const GeneratedCodeInfo = { + encode(message: GeneratedCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.annotation) { + GeneratedCodeInfo_Annotation.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.annotation.push(GeneratedCodeInfo_Annotation.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo { + return { + annotation: Array.isArray(object?.annotation) + ? object.annotation.map((e: any) => GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GeneratedCodeInfo): unknown { + const obj: any = {}; + if (message.annotation) { + obj.annotation = message.annotation.map((e) => e ? GeneratedCodeInfo_Annotation.toJSON(e) : undefined); + } else { + obj.annotation = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo(); + message.annotation = object.annotation?.map((e) => GeneratedCodeInfo_Annotation.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo_Annotation(): GeneratedCodeInfo_Annotation { + return { path: [], sourceFile: "", begin: 0, end: 0 }; +} + +export const GeneratedCodeInfo_Annotation = { + encode(message: GeneratedCodeInfo_Annotation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + if (message.sourceFile !== "") { + writer.uint32(18).string(message.sourceFile); + } + if (message.begin !== 0) { + writer.uint32(24).int32(message.begin); + } + if (message.end !== 0) { + writer.uint32(32).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo_Annotation { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo_Annotation(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + message.sourceFile = reader.string(); + break; + case 3: + message.begin = reader.int32(); + break; + case 4: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo_Annotation { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "", + begin: isSet(object.begin) ? Number(object.begin) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + }; + }, + + toJSON(message: GeneratedCodeInfo_Annotation): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile); + message.begin !== undefined && (obj.begin = Math.round(message.begin)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation(); + message.path = object.path?.map((e) => e) || []; + message.sourceFile = object.sourceFile ?? ""; + message.begin = object.begin ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.base.tendermint.v1beta1/types/google/protobuf/timestamp.ts b/ts-client/cosmos.base.tendermint.v1beta1/types/google/protobuf/timestamp.ts new file mode 100644 index 000000000..b00bb1b72 --- /dev/null +++ b/ts-client/cosmos.base.tendermint.v1beta1/types/google/protobuf/timestamp.ts @@ -0,0 +1,216 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * A Timestamp represents a point in time independent of any time zone or local + * calendar, encoded as a count of seconds and fractions of seconds at + * nanosecond resolution. The count is relative to an epoch at UTC midnight on + * January 1, 1970, in the proleptic Gregorian calendar which extends the + * Gregorian calendar backwards to year one. + * + * All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap + * second table is needed for interpretation, using a [24-hour linear + * smear](https://developers.google.com/time/smear). + * + * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By + * restricting to that range, we ensure that we can convert to and from [RFC + * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + * + * # Examples + * + * Example 1: Compute Timestamp from POSIX `time()`. + * + * Timestamp timestamp; + * timestamp.set_seconds(time(NULL)); + * timestamp.set_nanos(0); + * + * Example 2: Compute Timestamp from POSIX `gettimeofday()`. 
+ * + * struct timeval tv; + * gettimeofday(&tv, NULL); + * + * Timestamp timestamp; + * timestamp.set_seconds(tv.tv_sec); + * timestamp.set_nanos(tv.tv_usec * 1000); + * + * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + * + * FILETIME ft; + * GetSystemTimeAsFileTime(&ft); + * UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + * + * // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + * // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. + * Timestamp timestamp; + * timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + * timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + * + * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + * + * long millis = System.currentTimeMillis(); + * + * Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + * .setNanos((int) ((millis % 1000) * 1000000)).build(); + * + * Example 5: Compute Timestamp from Java `Instant.now()`. + * + * Instant now = Instant.now(); + * + * Timestamp timestamp = + * Timestamp.newBuilder().setSeconds(now.getEpochSecond()) + * .setNanos(now.getNano()).build(); + * + * Example 6: Compute Timestamp from current time in Python. + * + * timestamp = Timestamp() + * timestamp.GetCurrentTime() + * + * # JSON Mapping + * + * In JSON format, the Timestamp type is encoded as a string in the + * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the + * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" + * where {year} is always expressed using four digits while {month}, {day}, + * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional + * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone + * is required. 
A proto3 JSON serializer should always use UTC (as indicated by + * "Z") when printing the Timestamp type and a proto3 JSON parser should be + * able to accept both UTC and other timezones (as indicated by an offset). + * + * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + * 01:30 UTC on January 15, 2017. + * + * In JavaScript, one can convert a Date object to this format using the + * standard + * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) + * method. In Python, a standard `datetime.datetime` object can be converted + * to this format using + * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with + * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use + * the Joda Time's [`ISODateTimeFormat.dateTime()`]( + * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D + * ) to obtain a formatter capable of generating timestamps in this format. + */ +export interface Timestamp { + /** + * Represents seconds of UTC time since Unix epoch + * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + * 9999-12-31T23:59:59Z inclusive. + */ + seconds: number; + /** + * Non-negative fractions of a second at nanosecond resolution. Negative + * second values with fractions must still have non-negative nanos values + * that count forward in time. Must be from 0 to 999,999,999 + * inclusive. 
+ */ + nanos: number; +} + +function createBaseTimestamp(): Timestamp { + return { seconds: 0, nanos: 0 }; +} + +export const Timestamp = { + encode(message: Timestamp, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.seconds !== 0) { + writer.uint32(8).int64(message.seconds); + } + if (message.nanos !== 0) { + writer.uint32(16).int32(message.nanos); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Timestamp { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTimestamp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.seconds = longToNumber(reader.int64() as Long); + break; + case 2: + message.nanos = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Timestamp { + return { + seconds: isSet(object.seconds) ? Number(object.seconds) : 0, + nanos: isSet(object.nanos) ? Number(object.nanos) : 0, + }; + }, + + toJSON(message: Timestamp): unknown { + const obj: any = {}; + message.seconds !== undefined && (obj.seconds = Math.round(message.seconds)); + message.nanos !== undefined && (obj.nanos = Math.round(message.nanos)); + return obj; + }, + + fromPartial, I>>(object: I): Timestamp { + const message = createBaseTimestamp(); + message.seconds = object.seconds ?? 0; + message.nanos = object.nanos ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/crypto/keys.ts b/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/crypto/keys.ts new file mode 100644 index 000000000..82d64fdf6 --- /dev/null +++ b/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/crypto/keys.ts @@ -0,0 +1,129 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "tendermint.crypto"; + +/** PublicKey defines the keys available for use with Validators */ +export interface PublicKey { + ed25519: Uint8Array | undefined; + secp256k1: Uint8Array | undefined; +} + +function createBasePublicKey(): PublicKey { + return { ed25519: undefined, secp256k1: undefined }; +} 
+ +export const PublicKey = { + encode(message: PublicKey, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ed25519 !== undefined) { + writer.uint32(10).bytes(message.ed25519); + } + if (message.secp256k1 !== undefined) { + writer.uint32(18).bytes(message.secp256k1); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PublicKey { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePublicKey(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.ed25519 = reader.bytes(); + break; + case 2: + message.secp256k1 = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PublicKey { + return { + ed25519: isSet(object.ed25519) ? bytesFromBase64(object.ed25519) : undefined, + secp256k1: isSet(object.secp256k1) ? bytesFromBase64(object.secp256k1) : undefined, + }; + }, + + toJSON(message: PublicKey): unknown { + const obj: any = {}; + message.ed25519 !== undefined + && (obj.ed25519 = message.ed25519 !== undefined ? base64FromBytes(message.ed25519) : undefined); + message.secp256k1 !== undefined + && (obj.secp256k1 = message.secp256k1 !== undefined ? base64FromBytes(message.secp256k1) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): PublicKey { + const message = createBasePublicKey(); + message.ed25519 = object.ed25519 ?? undefined; + message.secp256k1 = object.secp256k1 ?? 
undefined; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/crypto/proof.ts b/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/crypto/proof.ts new file mode 100644 index 000000000..a307c776c --- /dev/null +++ b/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/crypto/proof.ts @@ -0,0 +1,439 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "tendermint.crypto"; + +export interface Proof { + total: number; + index: number; + leafHash: Uint8Array; + aunts: Uint8Array[]; +} + +export interface ValueOp { + /** Encoded in ProofOp.Key. */ + key: Uint8Array; + /** To encode in ProofOp.Data */ + proof: Proof | undefined; +} + +export interface DominoOp { + key: string; + input: string; + output: string; +} + +/** + * ProofOp defines an operation used for calculating Merkle root + * The data could be arbitrary format, providing nessecary data + * for example neighbouring node hash + */ +export interface ProofOp { + type: string; + key: Uint8Array; + data: Uint8Array; +} + +/** ProofOps is Merkle proof defined by the list of ProofOps */ +export interface ProofOps { + ops: ProofOp[]; +} + +function createBaseProof(): Proof { + return { total: 0, index: 0, leafHash: new Uint8Array(), aunts: [] }; +} + +export const Proof = { + encode(message: Proof, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.total !== 0) { + writer.uint32(8).int64(message.total); + } + if (message.index !== 0) { + writer.uint32(16).int64(message.index); + } + if (message.leafHash.length !== 0) { + writer.uint32(26).bytes(message.leafHash); + } + for (const v of message.aunts) { + writer.uint32(34).bytes(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Proof { + const reader = input 
instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseProof(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.total = longToNumber(reader.int64() as Long); + break; + case 2: + message.index = longToNumber(reader.int64() as Long); + break; + case 3: + message.leafHash = reader.bytes(); + break; + case 4: + message.aunts.push(reader.bytes()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Proof { + return { + total: isSet(object.total) ? Number(object.total) : 0, + index: isSet(object.index) ? Number(object.index) : 0, + leafHash: isSet(object.leafHash) ? bytesFromBase64(object.leafHash) : new Uint8Array(), + aunts: Array.isArray(object?.aunts) ? object.aunts.map((e: any) => bytesFromBase64(e)) : [], + }; + }, + + toJSON(message: Proof): unknown { + const obj: any = {}; + message.total !== undefined && (obj.total = Math.round(message.total)); + message.index !== undefined && (obj.index = Math.round(message.index)); + message.leafHash !== undefined + && (obj.leafHash = base64FromBytes(message.leafHash !== undefined ? message.leafHash : new Uint8Array())); + if (message.aunts) { + obj.aunts = message.aunts.map((e) => base64FromBytes(e !== undefined ? e : new Uint8Array())); + } else { + obj.aunts = []; + } + return obj; + }, + + fromPartial, I>>(object: I): Proof { + const message = createBaseProof(); + message.total = object.total ?? 0; + message.index = object.index ?? 0; + message.leafHash = object.leafHash ?? 
new Uint8Array(); + message.aunts = object.aunts?.map((e) => e) || []; + return message; + }, +}; + +function createBaseValueOp(): ValueOp { + return { key: new Uint8Array(), proof: undefined }; +} + +export const ValueOp = { + encode(message: ValueOp, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + if (message.proof !== undefined) { + Proof.encode(message.proof, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValueOp { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValueOp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + case 2: + message.proof = Proof.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ValueOp { + return { + key: isSet(object.key) ? bytesFromBase64(object.key) : new Uint8Array(), + proof: isSet(object.proof) ? Proof.fromJSON(object.proof) : undefined, + }; + }, + + toJSON(message: ValueOp): unknown { + const obj: any = {}; + message.key !== undefined + && (obj.key = base64FromBytes(message.key !== undefined ? message.key : new Uint8Array())); + message.proof !== undefined && (obj.proof = message.proof ? Proof.toJSON(message.proof) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): ValueOp { + const message = createBaseValueOp(); + message.key = object.key ?? new Uint8Array(); + message.proof = (object.proof !== undefined && object.proof !== null) ? 
Proof.fromPartial(object.proof) : undefined; + return message; + }, +}; + +function createBaseDominoOp(): DominoOp { + return { key: "", input: "", output: "" }; +} + +export const DominoOp = { + encode(message: DominoOp, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.input !== "") { + writer.uint32(18).string(message.input); + } + if (message.output !== "") { + writer.uint32(26).string(message.output); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DominoOp { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDominoOp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.key = reader.string(); + break; + case 2: + message.input = reader.string(); + break; + case 3: + message.output = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DominoOp { + return { + key: isSet(object.key) ? String(object.key) : "", + input: isSet(object.input) ? String(object.input) : "", + output: isSet(object.output) ? String(object.output) : "", + }; + }, + + toJSON(message: DominoOp): unknown { + const obj: any = {}; + message.key !== undefined && (obj.key = message.key); + message.input !== undefined && (obj.input = message.input); + message.output !== undefined && (obj.output = message.output); + return obj; + }, + + fromPartial, I>>(object: I): DominoOp { + const message = createBaseDominoOp(); + message.key = object.key ?? ""; + message.input = object.input ?? ""; + message.output = object.output ?? 
""; + return message; + }, +}; + +function createBaseProofOp(): ProofOp { + return { type: "", key: new Uint8Array(), data: new Uint8Array() }; +} + +export const ProofOp = { + encode(message: ProofOp, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.type !== "") { + writer.uint32(10).string(message.type); + } + if (message.key.length !== 0) { + writer.uint32(18).bytes(message.key); + } + if (message.data.length !== 0) { + writer.uint32(26).bytes(message.data); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ProofOp { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseProofOp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.type = reader.string(); + break; + case 2: + message.key = reader.bytes(); + break; + case 3: + message.data = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ProofOp { + return { + type: isSet(object.type) ? String(object.type) : "", + key: isSet(object.key) ? bytesFromBase64(object.key) : new Uint8Array(), + data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(), + }; + }, + + toJSON(message: ProofOp): unknown { + const obj: any = {}; + message.type !== undefined && (obj.type = message.type); + message.key !== undefined + && (obj.key = base64FromBytes(message.key !== undefined ? message.key : new Uint8Array())); + message.data !== undefined + && (obj.data = base64FromBytes(message.data !== undefined ? message.data : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): ProofOp { + const message = createBaseProofOp(); + message.type = object.type ?? ""; + message.key = object.key ?? new Uint8Array(); + message.data = object.data ?? 
new Uint8Array(); + return message; + }, +}; + +function createBaseProofOps(): ProofOps { + return { ops: [] }; +} + +export const ProofOps = { + encode(message: ProofOps, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.ops) { + ProofOp.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ProofOps { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseProofOps(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.ops.push(ProofOp.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ProofOps { + return { ops: Array.isArray(object?.ops) ? object.ops.map((e: any) => ProofOp.fromJSON(e)) : [] }; + }, + + toJSON(message: ProofOps): unknown { + const obj: any = {}; + if (message.ops) { + obj.ops = message.ops.map((e) => e ? 
ProofOp.toJSON(e) : undefined); + } else { + obj.ops = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ProofOps { + const message = createBaseProofOps(); + message.ops = object.ops?.map((e) => ProofOp.fromPartial(e)) || []; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/p2p/types.ts b/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/p2p/types.ts new file mode 100644 index 000000000..5ba4b1737 --- /dev/null +++ b/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/p2p/types.ts @@ -0,0 +1,423 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "tendermint.p2p"; + +export interface NetAddress { + id: string; + ip: string; + port: number; +} + +export interface ProtocolVersion { + p2p: number; + block: number; + app: number; +} + +export interface DefaultNodeInfo { + protocolVersion: ProtocolVersion | undefined; + defaultNodeId: string; + listenAddr: string; + network: string; + version: string; + channels: Uint8Array; + moniker: string; + other: DefaultNodeInfoOther | undefined; +} + +export interface DefaultNodeInfoOther { + txIndex: string; + rpcAddress: string; +} + +function createBaseNetAddress(): NetAddress { + return { id: "", ip: "", port: 0 }; +} + +export const NetAddress = { + encode(message: NetAddress, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + if (message.ip !== "") { + writer.uint32(18).string(message.ip); + } + if (message.port !== 0) { + writer.uint32(24).uint32(message.port); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): NetAddress { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseNetAddress(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.id = reader.string(); + break; + case 2: + message.ip = reader.string(); + break; + case 3: + message.port = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): NetAddress { + return { + id: isSet(object.id) ? String(object.id) : "", + ip: isSet(object.ip) ? String(object.ip) : "", + port: isSet(object.port) ? Number(object.port) : 0, + }; + }, + + toJSON(message: NetAddress): unknown { + const obj: any = {}; + message.id !== undefined && (obj.id = message.id); + message.ip !== undefined && (obj.ip = message.ip); + message.port !== undefined && (obj.port = Math.round(message.port)); + return obj; + }, + + fromPartial, I>>(object: I): NetAddress { + const message = createBaseNetAddress(); + message.id = object.id ?? ""; + message.ip = object.ip ?? ""; + message.port = object.port ?? 0; + return message; + }, +}; + +function createBaseProtocolVersion(): ProtocolVersion { + return { p2p: 0, block: 0, app: 0 }; +} + +export const ProtocolVersion = { + encode(message: ProtocolVersion, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.p2p !== 0) { + writer.uint32(8).uint64(message.p2p); + } + if (message.block !== 0) { + writer.uint32(16).uint64(message.block); + } + if (message.app !== 0) { + writer.uint32(24).uint64(message.app); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ProtocolVersion { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseProtocolVersion(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.p2p = longToNumber(reader.uint64() as Long); + break; + case 2: + message.block = longToNumber(reader.uint64() as Long); + break; + case 3: + message.app = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ProtocolVersion { + return { + p2p: isSet(object.p2p) ? Number(object.p2p) : 0, + block: isSet(object.block) ? Number(object.block) : 0, + app: isSet(object.app) ? Number(object.app) : 0, + }; + }, + + toJSON(message: ProtocolVersion): unknown { + const obj: any = {}; + message.p2p !== undefined && (obj.p2p = Math.round(message.p2p)); + message.block !== undefined && (obj.block = Math.round(message.block)); + message.app !== undefined && (obj.app = Math.round(message.app)); + return obj; + }, + + fromPartial, I>>(object: I): ProtocolVersion { + const message = createBaseProtocolVersion(); + message.p2p = object.p2p ?? 0; + message.block = object.block ?? 0; + message.app = object.app ?? 
0; + return message; + }, +}; + +function createBaseDefaultNodeInfo(): DefaultNodeInfo { + return { + protocolVersion: undefined, + defaultNodeId: "", + listenAddr: "", + network: "", + version: "", + channels: new Uint8Array(), + moniker: "", + other: undefined, + }; +} + +export const DefaultNodeInfo = { + encode(message: DefaultNodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.protocolVersion !== undefined) { + ProtocolVersion.encode(message.protocolVersion, writer.uint32(10).fork()).ldelim(); + } + if (message.defaultNodeId !== "") { + writer.uint32(18).string(message.defaultNodeId); + } + if (message.listenAddr !== "") { + writer.uint32(26).string(message.listenAddr); + } + if (message.network !== "") { + writer.uint32(34).string(message.network); + } + if (message.version !== "") { + writer.uint32(42).string(message.version); + } + if (message.channels.length !== 0) { + writer.uint32(50).bytes(message.channels); + } + if (message.moniker !== "") { + writer.uint32(58).string(message.moniker); + } + if (message.other !== undefined) { + DefaultNodeInfoOther.encode(message.other, writer.uint32(66).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DefaultNodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDefaultNodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.protocolVersion = ProtocolVersion.decode(reader, reader.uint32()); + break; + case 2: + message.defaultNodeId = reader.string(); + break; + case 3: + message.listenAddr = reader.string(); + break; + case 4: + message.network = reader.string(); + break; + case 5: + message.version = reader.string(); + break; + case 6: + message.channels = reader.bytes(); + break; + case 7: + message.moniker = reader.string(); + break; + case 8: + message.other = DefaultNodeInfoOther.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DefaultNodeInfo { + return { + protocolVersion: isSet(object.protocolVersion) ? ProtocolVersion.fromJSON(object.protocolVersion) : undefined, + defaultNodeId: isSet(object.defaultNodeId) ? String(object.defaultNodeId) : "", + listenAddr: isSet(object.listenAddr) ? String(object.listenAddr) : "", + network: isSet(object.network) ? String(object.network) : "", + version: isSet(object.version) ? String(object.version) : "", + channels: isSet(object.channels) ? bytesFromBase64(object.channels) : new Uint8Array(), + moniker: isSet(object.moniker) ? String(object.moniker) : "", + other: isSet(object.other) ? DefaultNodeInfoOther.fromJSON(object.other) : undefined, + }; + }, + + toJSON(message: DefaultNodeInfo): unknown { + const obj: any = {}; + message.protocolVersion !== undefined + && (obj.protocolVersion = message.protocolVersion ? 
ProtocolVersion.toJSON(message.protocolVersion) : undefined); + message.defaultNodeId !== undefined && (obj.defaultNodeId = message.defaultNodeId); + message.listenAddr !== undefined && (obj.listenAddr = message.listenAddr); + message.network !== undefined && (obj.network = message.network); + message.version !== undefined && (obj.version = message.version); + message.channels !== undefined + && (obj.channels = base64FromBytes(message.channels !== undefined ? message.channels : new Uint8Array())); + message.moniker !== undefined && (obj.moniker = message.moniker); + message.other !== undefined && (obj.other = message.other ? DefaultNodeInfoOther.toJSON(message.other) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): DefaultNodeInfo { + const message = createBaseDefaultNodeInfo(); + message.protocolVersion = (object.protocolVersion !== undefined && object.protocolVersion !== null) + ? ProtocolVersion.fromPartial(object.protocolVersion) + : undefined; + message.defaultNodeId = object.defaultNodeId ?? ""; + message.listenAddr = object.listenAddr ?? ""; + message.network = object.network ?? ""; + message.version = object.version ?? ""; + message.channels = object.channels ?? new Uint8Array(); + message.moniker = object.moniker ?? ""; + message.other = (object.other !== undefined && object.other !== null) + ? 
DefaultNodeInfoOther.fromPartial(object.other) + : undefined; + return message; + }, +}; + +function createBaseDefaultNodeInfoOther(): DefaultNodeInfoOther { + return { txIndex: "", rpcAddress: "" }; +} + +export const DefaultNodeInfoOther = { + encode(message: DefaultNodeInfoOther, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.txIndex !== "") { + writer.uint32(10).string(message.txIndex); + } + if (message.rpcAddress !== "") { + writer.uint32(18).string(message.rpcAddress); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DefaultNodeInfoOther { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDefaultNodeInfoOther(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.txIndex = reader.string(); + break; + case 2: + message.rpcAddress = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DefaultNodeInfoOther { + return { + txIndex: isSet(object.txIndex) ? String(object.txIndex) : "", + rpcAddress: isSet(object.rpcAddress) ? String(object.rpcAddress) : "", + }; + }, + + toJSON(message: DefaultNodeInfoOther): unknown { + const obj: any = {}; + message.txIndex !== undefined && (obj.txIndex = message.txIndex); + message.rpcAddress !== undefined && (obj.rpcAddress = message.rpcAddress); + return obj; + }, + + fromPartial, I>>(object: I): DefaultNodeInfoOther { + const message = createBaseDefaultNodeInfoOther(); + message.txIndex = object.txIndex ?? ""; + message.rpcAddress = object.rpcAddress ?? 
""; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/types/block.ts b/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/types/block.ts new file mode 100644 index 000000000..329acae8b --- /dev/null +++ b/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/types/block.ts @@ -0,0 +1,112 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { EvidenceList } from "./evidence"; +import { Commit, Data, Header } from "./types"; + +export const protobufPackage = "tendermint.types"; + +export interface Block { + header: Header | undefined; + data: Data | undefined; + evidence: EvidenceList | undefined; + lastCommit: Commit | undefined; +} + +function createBaseBlock(): Block { + return { header: undefined, data: undefined, evidence: undefined, lastCommit: undefined }; +} + +export const Block = { + encode(message: Block, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.header !== undefined) { + Header.encode(message.header, writer.uint32(10).fork()).ldelim(); + } + if (message.data !== undefined) { + Data.encode(message.data, writer.uint32(18).fork()).ldelim(); + } + if (message.evidence !== undefined) { + EvidenceList.encode(message.evidence, writer.uint32(26).fork()).ldelim(); + } + if (message.lastCommit !== undefined) { + Commit.encode(message.lastCommit, writer.uint32(34).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Block { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBlock(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.header = Header.decode(reader, reader.uint32()); + break; + case 2: + message.data = Data.decode(reader, reader.uint32()); + break; + case 3: + message.evidence = EvidenceList.decode(reader, reader.uint32()); + break; + case 4: + message.lastCommit = Commit.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Block { + return { + header: isSet(object.header) ? Header.fromJSON(object.header) : undefined, + data: isSet(object.data) ? Data.fromJSON(object.data) : undefined, + evidence: isSet(object.evidence) ? EvidenceList.fromJSON(object.evidence) : undefined, + lastCommit: isSet(object.lastCommit) ? Commit.fromJSON(object.lastCommit) : undefined, + }; + }, + + toJSON(message: Block): unknown { + const obj: any = {}; + message.header !== undefined && (obj.header = message.header ? Header.toJSON(message.header) : undefined); + message.data !== undefined && (obj.data = message.data ? Data.toJSON(message.data) : undefined); + message.evidence !== undefined + && (obj.evidence = message.evidence ? EvidenceList.toJSON(message.evidence) : undefined); + message.lastCommit !== undefined + && (obj.lastCommit = message.lastCommit ? Commit.toJSON(message.lastCommit) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): Block { + const message = createBaseBlock(); + message.header = (object.header !== undefined && object.header !== null) + ? Header.fromPartial(object.header) + : undefined; + message.data = (object.data !== undefined && object.data !== null) ? Data.fromPartial(object.data) : undefined; + message.evidence = (object.evidence !== undefined && object.evidence !== null) + ? 
EvidenceList.fromPartial(object.evidence) + : undefined; + message.lastCommit = (object.lastCommit !== undefined && object.lastCommit !== null) + ? Commit.fromPartial(object.lastCommit) + : undefined; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/types/evidence.ts b/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/types/evidence.ts new file mode 100644 index 000000000..98a4eb92e --- /dev/null +++ b/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/types/evidence.ts @@ -0,0 +1,412 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { Timestamp } from "../../google/protobuf/timestamp"; +import { LightBlock, Vote } from "./types"; +import { Validator } from "./validator"; + +export const protobufPackage = "tendermint.types"; + +export interface Evidence { + duplicateVoteEvidence: DuplicateVoteEvidence | undefined; + lightClientAttackEvidence: LightClientAttackEvidence | undefined; +} + +/** DuplicateVoteEvidence contains evidence of a validator signed two conflicting votes. */ +export interface DuplicateVoteEvidence { + voteA: Vote | undefined; + voteB: Vote | undefined; + totalVotingPower: number; + validatorPower: number; + timestamp: Date | undefined; +} + +/** LightClientAttackEvidence contains evidence of a set of validators attempting to mislead a light client. 
*/ +export interface LightClientAttackEvidence { + conflictingBlock: LightBlock | undefined; + commonHeight: number; + byzantineValidators: Validator[]; + totalVotingPower: number; + timestamp: Date | undefined; +} + +export interface EvidenceList { + evidence: Evidence[]; +} + +function createBaseEvidence(): Evidence { + return { duplicateVoteEvidence: undefined, lightClientAttackEvidence: undefined }; +} + +export const Evidence = { + encode(message: Evidence, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.duplicateVoteEvidence !== undefined) { + DuplicateVoteEvidence.encode(message.duplicateVoteEvidence, writer.uint32(10).fork()).ldelim(); + } + if (message.lightClientAttackEvidence !== undefined) { + LightClientAttackEvidence.encode(message.lightClientAttackEvidence, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Evidence { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEvidence(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.duplicateVoteEvidence = DuplicateVoteEvidence.decode(reader, reader.uint32()); + break; + case 2: + message.lightClientAttackEvidence = LightClientAttackEvidence.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Evidence { + return { + duplicateVoteEvidence: isSet(object.duplicateVoteEvidence) + ? DuplicateVoteEvidence.fromJSON(object.duplicateVoteEvidence) + : undefined, + lightClientAttackEvidence: isSet(object.lightClientAttackEvidence) + ? 
LightClientAttackEvidence.fromJSON(object.lightClientAttackEvidence) + : undefined, + }; + }, + + toJSON(message: Evidence): unknown { + const obj: any = {}; + message.duplicateVoteEvidence !== undefined && (obj.duplicateVoteEvidence = message.duplicateVoteEvidence + ? DuplicateVoteEvidence.toJSON(message.duplicateVoteEvidence) + : undefined); + message.lightClientAttackEvidence !== undefined + && (obj.lightClientAttackEvidence = message.lightClientAttackEvidence + ? LightClientAttackEvidence.toJSON(message.lightClientAttackEvidence) + : undefined); + return obj; + }, + + fromPartial, I>>(object: I): Evidence { + const message = createBaseEvidence(); + message.duplicateVoteEvidence = + (object.duplicateVoteEvidence !== undefined && object.duplicateVoteEvidence !== null) + ? DuplicateVoteEvidence.fromPartial(object.duplicateVoteEvidence) + : undefined; + message.lightClientAttackEvidence = + (object.lightClientAttackEvidence !== undefined && object.lightClientAttackEvidence !== null) + ? 
LightClientAttackEvidence.fromPartial(object.lightClientAttackEvidence) + : undefined; + return message; + }, +}; + +function createBaseDuplicateVoteEvidence(): DuplicateVoteEvidence { + return { voteA: undefined, voteB: undefined, totalVotingPower: 0, validatorPower: 0, timestamp: undefined }; +} + +export const DuplicateVoteEvidence = { + encode(message: DuplicateVoteEvidence, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.voteA !== undefined) { + Vote.encode(message.voteA, writer.uint32(10).fork()).ldelim(); + } + if (message.voteB !== undefined) { + Vote.encode(message.voteB, writer.uint32(18).fork()).ldelim(); + } + if (message.totalVotingPower !== 0) { + writer.uint32(24).int64(message.totalVotingPower); + } + if (message.validatorPower !== 0) { + writer.uint32(32).int64(message.validatorPower); + } + if (message.timestamp !== undefined) { + Timestamp.encode(toTimestamp(message.timestamp), writer.uint32(42).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DuplicateVoteEvidence { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDuplicateVoteEvidence(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.voteA = Vote.decode(reader, reader.uint32()); + break; + case 2: + message.voteB = Vote.decode(reader, reader.uint32()); + break; + case 3: + message.totalVotingPower = longToNumber(reader.int64() as Long); + break; + case 4: + message.validatorPower = longToNumber(reader.int64() as Long); + break; + case 5: + message.timestamp = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DuplicateVoteEvidence { + return { + voteA: isSet(object.voteA) ? 
Vote.fromJSON(object.voteA) : undefined, + voteB: isSet(object.voteB) ? Vote.fromJSON(object.voteB) : undefined, + totalVotingPower: isSet(object.totalVotingPower) ? Number(object.totalVotingPower) : 0, + validatorPower: isSet(object.validatorPower) ? Number(object.validatorPower) : 0, + timestamp: isSet(object.timestamp) ? fromJsonTimestamp(object.timestamp) : undefined, + }; + }, + + toJSON(message: DuplicateVoteEvidence): unknown { + const obj: any = {}; + message.voteA !== undefined && (obj.voteA = message.voteA ? Vote.toJSON(message.voteA) : undefined); + message.voteB !== undefined && (obj.voteB = message.voteB ? Vote.toJSON(message.voteB) : undefined); + message.totalVotingPower !== undefined && (obj.totalVotingPower = Math.round(message.totalVotingPower)); + message.validatorPower !== undefined && (obj.validatorPower = Math.round(message.validatorPower)); + message.timestamp !== undefined && (obj.timestamp = message.timestamp.toISOString()); + return obj; + }, + + fromPartial, I>>(object: I): DuplicateVoteEvidence { + const message = createBaseDuplicateVoteEvidence(); + message.voteA = (object.voteA !== undefined && object.voteA !== null) ? Vote.fromPartial(object.voteA) : undefined; + message.voteB = (object.voteB !== undefined && object.voteB !== null) ? Vote.fromPartial(object.voteB) : undefined; + message.totalVotingPower = object.totalVotingPower ?? 0; + message.validatorPower = object.validatorPower ?? 0; + message.timestamp = object.timestamp ?? 
undefined; + return message; + }, +}; + +function createBaseLightClientAttackEvidence(): LightClientAttackEvidence { + return { + conflictingBlock: undefined, + commonHeight: 0, + byzantineValidators: [], + totalVotingPower: 0, + timestamp: undefined, + }; +} + +export const LightClientAttackEvidence = { + encode(message: LightClientAttackEvidence, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.conflictingBlock !== undefined) { + LightBlock.encode(message.conflictingBlock, writer.uint32(10).fork()).ldelim(); + } + if (message.commonHeight !== 0) { + writer.uint32(16).int64(message.commonHeight); + } + for (const v of message.byzantineValidators) { + Validator.encode(v!, writer.uint32(26).fork()).ldelim(); + } + if (message.totalVotingPower !== 0) { + writer.uint32(32).int64(message.totalVotingPower); + } + if (message.timestamp !== undefined) { + Timestamp.encode(toTimestamp(message.timestamp), writer.uint32(42).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): LightClientAttackEvidence { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseLightClientAttackEvidence(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.conflictingBlock = LightBlock.decode(reader, reader.uint32()); + break; + case 2: + message.commonHeight = longToNumber(reader.int64() as Long); + break; + case 3: + message.byzantineValidators.push(Validator.decode(reader, reader.uint32())); + break; + case 4: + message.totalVotingPower = longToNumber(reader.int64() as Long); + break; + case 5: + message.timestamp = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): LightClientAttackEvidence { + return { + conflictingBlock: isSet(object.conflictingBlock) ? LightBlock.fromJSON(object.conflictingBlock) : undefined, + commonHeight: isSet(object.commonHeight) ? Number(object.commonHeight) : 0, + byzantineValidators: Array.isArray(object?.byzantineValidators) + ? object.byzantineValidators.map((e: any) => Validator.fromJSON(e)) + : [], + totalVotingPower: isSet(object.totalVotingPower) ? Number(object.totalVotingPower) : 0, + timestamp: isSet(object.timestamp) ? fromJsonTimestamp(object.timestamp) : undefined, + }; + }, + + toJSON(message: LightClientAttackEvidence): unknown { + const obj: any = {}; + message.conflictingBlock !== undefined + && (obj.conflictingBlock = message.conflictingBlock ? LightBlock.toJSON(message.conflictingBlock) : undefined); + message.commonHeight !== undefined && (obj.commonHeight = Math.round(message.commonHeight)); + if (message.byzantineValidators) { + obj.byzantineValidators = message.byzantineValidators.map((e) => e ? 
Validator.toJSON(e) : undefined); + } else { + obj.byzantineValidators = []; + } + message.totalVotingPower !== undefined && (obj.totalVotingPower = Math.round(message.totalVotingPower)); + message.timestamp !== undefined && (obj.timestamp = message.timestamp.toISOString()); + return obj; + }, + + fromPartial, I>>(object: I): LightClientAttackEvidence { + const message = createBaseLightClientAttackEvidence(); + message.conflictingBlock = (object.conflictingBlock !== undefined && object.conflictingBlock !== null) + ? LightBlock.fromPartial(object.conflictingBlock) + : undefined; + message.commonHeight = object.commonHeight ?? 0; + message.byzantineValidators = object.byzantineValidators?.map((e) => Validator.fromPartial(e)) || []; + message.totalVotingPower = object.totalVotingPower ?? 0; + message.timestamp = object.timestamp ?? undefined; + return message; + }, +}; + +function createBaseEvidenceList(): EvidenceList { + return { evidence: [] }; +} + +export const EvidenceList = { + encode(message: EvidenceList, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.evidence) { + Evidence.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EvidenceList { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEvidenceList(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.evidence.push(Evidence.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EvidenceList { + return { evidence: Array.isArray(object?.evidence) ? 
object.evidence.map((e: any) => Evidence.fromJSON(e)) : [] }; + }, + + toJSON(message: EvidenceList): unknown { + const obj: any = {}; + if (message.evidence) { + obj.evidence = message.evidence.map((e) => e ? Evidence.toJSON(e) : undefined); + } else { + obj.evidence = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EvidenceList { + const message = createBaseEvidenceList(); + message.evidence = object.evidence?.map((e) => Evidence.fromPartial(e)) || []; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function toTimestamp(date: Date): Timestamp { + const seconds = date.getTime() / 1_000; + const nanos = (date.getTime() % 1_000) * 1_000_000; + return { seconds, nanos }; +} + +function fromTimestamp(t: Timestamp): Date { + let millis = t.seconds * 1_000; + millis += t.nanos / 1_000_000; + return new Date(millis); +} + +function fromJsonTimestamp(o: any): Date { + if (o instanceof Date) { + return o; + } else if (typeof o === "string") { + return new Date(o); + } else { + return fromTimestamp(Timestamp.fromJSON(o)); + } +} + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/types/types.ts b/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/types/types.ts new file mode 100644 index 000000000..93cdfab9b --- /dev/null +++ b/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/types/types.ts @@ -0,0 +1,1452 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { Timestamp } from "../../google/protobuf/timestamp"; +import { Proof } from "../crypto/proof"; +import { Consensus } from "../version/types"; +import { ValidatorSet } from "./validator"; + +export const protobufPackage = "tendermint.types"; + +/** BlockIdFlag indicates which BlcokID the signature is for */ +export enum BlockIDFlag { + BLOCK_ID_FLAG_UNKNOWN = 0, + BLOCK_ID_FLAG_ABSENT = 1, + BLOCK_ID_FLAG_COMMIT = 2, + BLOCK_ID_FLAG_NIL = 3, + UNRECOGNIZED = -1, +} + +export function blockIDFlagFromJSON(object: any): BlockIDFlag { + switch (object) { + case 0: + case "BLOCK_ID_FLAG_UNKNOWN": + return 
BlockIDFlag.BLOCK_ID_FLAG_UNKNOWN; + case 1: + case "BLOCK_ID_FLAG_ABSENT": + return BlockIDFlag.BLOCK_ID_FLAG_ABSENT; + case 2: + case "BLOCK_ID_FLAG_COMMIT": + return BlockIDFlag.BLOCK_ID_FLAG_COMMIT; + case 3: + case "BLOCK_ID_FLAG_NIL": + return BlockIDFlag.BLOCK_ID_FLAG_NIL; + case -1: + case "UNRECOGNIZED": + default: + return BlockIDFlag.UNRECOGNIZED; + } +} + +export function blockIDFlagToJSON(object: BlockIDFlag): string { + switch (object) { + case BlockIDFlag.BLOCK_ID_FLAG_UNKNOWN: + return "BLOCK_ID_FLAG_UNKNOWN"; + case BlockIDFlag.BLOCK_ID_FLAG_ABSENT: + return "BLOCK_ID_FLAG_ABSENT"; + case BlockIDFlag.BLOCK_ID_FLAG_COMMIT: + return "BLOCK_ID_FLAG_COMMIT"; + case BlockIDFlag.BLOCK_ID_FLAG_NIL: + return "BLOCK_ID_FLAG_NIL"; + case BlockIDFlag.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** SignedMsgType is a type of signed message in the consensus. */ +export enum SignedMsgType { + SIGNED_MSG_TYPE_UNKNOWN = 0, + /** SIGNED_MSG_TYPE_PREVOTE - Votes */ + SIGNED_MSG_TYPE_PREVOTE = 1, + SIGNED_MSG_TYPE_PRECOMMIT = 2, + /** SIGNED_MSG_TYPE_PROPOSAL - Proposals */ + SIGNED_MSG_TYPE_PROPOSAL = 32, + UNRECOGNIZED = -1, +} + +export function signedMsgTypeFromJSON(object: any): SignedMsgType { + switch (object) { + case 0: + case "SIGNED_MSG_TYPE_UNKNOWN": + return SignedMsgType.SIGNED_MSG_TYPE_UNKNOWN; + case 1: + case "SIGNED_MSG_TYPE_PREVOTE": + return SignedMsgType.SIGNED_MSG_TYPE_PREVOTE; + case 2: + case "SIGNED_MSG_TYPE_PRECOMMIT": + return SignedMsgType.SIGNED_MSG_TYPE_PRECOMMIT; + case 32: + case "SIGNED_MSG_TYPE_PROPOSAL": + return SignedMsgType.SIGNED_MSG_TYPE_PROPOSAL; + case -1: + case "UNRECOGNIZED": + default: + return SignedMsgType.UNRECOGNIZED; + } +} + +export function signedMsgTypeToJSON(object: SignedMsgType): string { + switch (object) { + case SignedMsgType.SIGNED_MSG_TYPE_UNKNOWN: + return "SIGNED_MSG_TYPE_UNKNOWN"; + case SignedMsgType.SIGNED_MSG_TYPE_PREVOTE: + return "SIGNED_MSG_TYPE_PREVOTE"; + case 
SignedMsgType.SIGNED_MSG_TYPE_PRECOMMIT: + return "SIGNED_MSG_TYPE_PRECOMMIT"; + case SignedMsgType.SIGNED_MSG_TYPE_PROPOSAL: + return "SIGNED_MSG_TYPE_PROPOSAL"; + case SignedMsgType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** PartsetHeader */ +export interface PartSetHeader { + total: number; + hash: Uint8Array; +} + +export interface Part { + index: number; + bytes: Uint8Array; + proof: Proof | undefined; +} + +/** BlockID */ +export interface BlockID { + hash: Uint8Array; + partSetHeader: PartSetHeader | undefined; +} + +/** Header defines the structure of a block header. */ +export interface Header { + /** basic block info */ + version: Consensus | undefined; + chainId: string; + height: number; + time: + | Date + | undefined; + /** prev block info */ + lastBlockId: + | BlockID + | undefined; + /** hashes of block data */ + lastCommitHash: Uint8Array; + /** transactions */ + dataHash: Uint8Array; + /** hashes from the app output from the prev block */ + validatorsHash: Uint8Array; + /** validators for the next block */ + nextValidatorsHash: Uint8Array; + /** consensus params for current block */ + consensusHash: Uint8Array; + /** state after txs from the previous block */ + appHash: Uint8Array; + /** root hash of all results from the txs from the previous block */ + lastResultsHash: Uint8Array; + /** consensus info */ + evidenceHash: Uint8Array; + /** original proposer of the block */ + proposerAddress: Uint8Array; +} + +/** Data contains the set of transactions included in the block */ +export interface Data { + /** + * Txs that will be applied by state @ block.Height+1. + * NOTE: not all txs here are valid. We're just agreeing on the order first. + * This means that block.AppHash does not include these txs. + */ + txs: Uint8Array[]; +} + +/** + * Vote represents a prevote, precommit, or commit vote from validators for + * consensus. 
+ */ +export interface Vote { + type: SignedMsgType; + height: number; + round: number; + /** zero if vote is nil. */ + blockId: BlockID | undefined; + timestamp: Date | undefined; + validatorAddress: Uint8Array; + validatorIndex: number; + signature: Uint8Array; +} + +/** Commit contains the evidence that a block was committed by a set of validators. */ +export interface Commit { + height: number; + round: number; + blockId: BlockID | undefined; + signatures: CommitSig[]; +} + +/** CommitSig is a part of the Vote included in a Commit. */ +export interface CommitSig { + blockIdFlag: BlockIDFlag; + validatorAddress: Uint8Array; + timestamp: Date | undefined; + signature: Uint8Array; +} + +export interface Proposal { + type: SignedMsgType; + height: number; + round: number; + polRound: number; + blockId: BlockID | undefined; + timestamp: Date | undefined; + signature: Uint8Array; +} + +export interface SignedHeader { + header: Header | undefined; + commit: Commit | undefined; +} + +export interface LightBlock { + signedHeader: SignedHeader | undefined; + validatorSet: ValidatorSet | undefined; +} + +export interface BlockMeta { + blockId: BlockID | undefined; + blockSize: number; + header: Header | undefined; + numTxs: number; +} + +/** TxProof represents a Merkle proof of the presence of a transaction in the Merkle tree. */ +export interface TxProof { + rootHash: Uint8Array; + data: Uint8Array; + proof: Proof | undefined; +} + +function createBasePartSetHeader(): PartSetHeader { + return { total: 0, hash: new Uint8Array() }; +} + +export const PartSetHeader = { + encode(message: PartSetHeader, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.total !== 0) { + writer.uint32(8).uint32(message.total); + } + if (message.hash.length !== 0) { + writer.uint32(18).bytes(message.hash); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PartSetHeader { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePartSetHeader(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.total = reader.uint32(); + break; + case 2: + message.hash = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PartSetHeader { + return { + total: isSet(object.total) ? Number(object.total) : 0, + hash: isSet(object.hash) ? bytesFromBase64(object.hash) : new Uint8Array(), + }; + }, + + toJSON(message: PartSetHeader): unknown { + const obj: any = {}; + message.total !== undefined && (obj.total = Math.round(message.total)); + message.hash !== undefined + && (obj.hash = base64FromBytes(message.hash !== undefined ? message.hash : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): PartSetHeader { + const message = createBasePartSetHeader(); + message.total = object.total ?? 0; + message.hash = object.hash ?? new Uint8Array(); + return message; + }, +}; + +function createBasePart(): Part { + return { index: 0, bytes: new Uint8Array(), proof: undefined }; +} + +export const Part = { + encode(message: Part, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.index !== 0) { + writer.uint32(8).uint32(message.index); + } + if (message.bytes.length !== 0) { + writer.uint32(18).bytes(message.bytes); + } + if (message.proof !== undefined) { + Proof.encode(message.proof, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Part { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.index = reader.uint32(); + break; + case 2: + message.bytes = reader.bytes(); + break; + case 3: + message.proof = Proof.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Part { + return { + index: isSet(object.index) ? Number(object.index) : 0, + bytes: isSet(object.bytes) ? bytesFromBase64(object.bytes) : new Uint8Array(), + proof: isSet(object.proof) ? Proof.fromJSON(object.proof) : undefined, + }; + }, + + toJSON(message: Part): unknown { + const obj: any = {}; + message.index !== undefined && (obj.index = Math.round(message.index)); + message.bytes !== undefined + && (obj.bytes = base64FromBytes(message.bytes !== undefined ? message.bytes : new Uint8Array())); + message.proof !== undefined && (obj.proof = message.proof ? Proof.toJSON(message.proof) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): Part { + const message = createBasePart(); + message.index = object.index ?? 0; + message.bytes = object.bytes ?? new Uint8Array(); + message.proof = (object.proof !== undefined && object.proof !== null) ? Proof.fromPartial(object.proof) : undefined; + return message; + }, +}; + +function createBaseBlockID(): BlockID { + return { hash: new Uint8Array(), partSetHeader: undefined }; +} + +export const BlockID = { + encode(message: BlockID, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.hash.length !== 0) { + writer.uint32(10).bytes(message.hash); + } + if (message.partSetHeader !== undefined) { + PartSetHeader.encode(message.partSetHeader, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BlockID { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBlockID(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.hash = reader.bytes(); + break; + case 2: + message.partSetHeader = PartSetHeader.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): BlockID { + return { + hash: isSet(object.hash) ? bytesFromBase64(object.hash) : new Uint8Array(), + partSetHeader: isSet(object.partSetHeader) ? PartSetHeader.fromJSON(object.partSetHeader) : undefined, + }; + }, + + toJSON(message: BlockID): unknown { + const obj: any = {}; + message.hash !== undefined + && (obj.hash = base64FromBytes(message.hash !== undefined ? message.hash : new Uint8Array())); + message.partSetHeader !== undefined + && (obj.partSetHeader = message.partSetHeader ? PartSetHeader.toJSON(message.partSetHeader) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): BlockID { + const message = createBaseBlockID(); + message.hash = object.hash ?? new Uint8Array(); + message.partSetHeader = (object.partSetHeader !== undefined && object.partSetHeader !== null) + ? 
PartSetHeader.fromPartial(object.partSetHeader) + : undefined; + return message; + }, +}; + +function createBaseHeader(): Header { + return { + version: undefined, + chainId: "", + height: 0, + time: undefined, + lastBlockId: undefined, + lastCommitHash: new Uint8Array(), + dataHash: new Uint8Array(), + validatorsHash: new Uint8Array(), + nextValidatorsHash: new Uint8Array(), + consensusHash: new Uint8Array(), + appHash: new Uint8Array(), + lastResultsHash: new Uint8Array(), + evidenceHash: new Uint8Array(), + proposerAddress: new Uint8Array(), + }; +} + +export const Header = { + encode(message: Header, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.version !== undefined) { + Consensus.encode(message.version, writer.uint32(10).fork()).ldelim(); + } + if (message.chainId !== "") { + writer.uint32(18).string(message.chainId); + } + if (message.height !== 0) { + writer.uint32(24).int64(message.height); + } + if (message.time !== undefined) { + Timestamp.encode(toTimestamp(message.time), writer.uint32(34).fork()).ldelim(); + } + if (message.lastBlockId !== undefined) { + BlockID.encode(message.lastBlockId, writer.uint32(42).fork()).ldelim(); + } + if (message.lastCommitHash.length !== 0) { + writer.uint32(50).bytes(message.lastCommitHash); + } + if (message.dataHash.length !== 0) { + writer.uint32(58).bytes(message.dataHash); + } + if (message.validatorsHash.length !== 0) { + writer.uint32(66).bytes(message.validatorsHash); + } + if (message.nextValidatorsHash.length !== 0) { + writer.uint32(74).bytes(message.nextValidatorsHash); + } + if (message.consensusHash.length !== 0) { + writer.uint32(82).bytes(message.consensusHash); + } + if (message.appHash.length !== 0) { + writer.uint32(90).bytes(message.appHash); + } + if (message.lastResultsHash.length !== 0) { + writer.uint32(98).bytes(message.lastResultsHash); + } + if (message.evidenceHash.length !== 0) { + writer.uint32(106).bytes(message.evidenceHash); + } + if (message.proposerAddress.length 
!== 0) { + writer.uint32(114).bytes(message.proposerAddress); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Header { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHeader(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.version = Consensus.decode(reader, reader.uint32()); + break; + case 2: + message.chainId = reader.string(); + break; + case 3: + message.height = longToNumber(reader.int64() as Long); + break; + case 4: + message.time = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + case 5: + message.lastBlockId = BlockID.decode(reader, reader.uint32()); + break; + case 6: + message.lastCommitHash = reader.bytes(); + break; + case 7: + message.dataHash = reader.bytes(); + break; + case 8: + message.validatorsHash = reader.bytes(); + break; + case 9: + message.nextValidatorsHash = reader.bytes(); + break; + case 10: + message.consensusHash = reader.bytes(); + break; + case 11: + message.appHash = reader.bytes(); + break; + case 12: + message.lastResultsHash = reader.bytes(); + break; + case 13: + message.evidenceHash = reader.bytes(); + break; + case 14: + message.proposerAddress = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Header { + return { + version: isSet(object.version) ? Consensus.fromJSON(object.version) : undefined, + chainId: isSet(object.chainId) ? String(object.chainId) : "", + height: isSet(object.height) ? Number(object.height) : 0, + time: isSet(object.time) ? fromJsonTimestamp(object.time) : undefined, + lastBlockId: isSet(object.lastBlockId) ? BlockID.fromJSON(object.lastBlockId) : undefined, + lastCommitHash: isSet(object.lastCommitHash) ? 
bytesFromBase64(object.lastCommitHash) : new Uint8Array(), + dataHash: isSet(object.dataHash) ? bytesFromBase64(object.dataHash) : new Uint8Array(), + validatorsHash: isSet(object.validatorsHash) ? bytesFromBase64(object.validatorsHash) : new Uint8Array(), + nextValidatorsHash: isSet(object.nextValidatorsHash) + ? bytesFromBase64(object.nextValidatorsHash) + : new Uint8Array(), + consensusHash: isSet(object.consensusHash) ? bytesFromBase64(object.consensusHash) : new Uint8Array(), + appHash: isSet(object.appHash) ? bytesFromBase64(object.appHash) : new Uint8Array(), + lastResultsHash: isSet(object.lastResultsHash) ? bytesFromBase64(object.lastResultsHash) : new Uint8Array(), + evidenceHash: isSet(object.evidenceHash) ? bytesFromBase64(object.evidenceHash) : new Uint8Array(), + proposerAddress: isSet(object.proposerAddress) ? bytesFromBase64(object.proposerAddress) : new Uint8Array(), + }; + }, + + toJSON(message: Header): unknown { + const obj: any = {}; + message.version !== undefined && (obj.version = message.version ? Consensus.toJSON(message.version) : undefined); + message.chainId !== undefined && (obj.chainId = message.chainId); + message.height !== undefined && (obj.height = Math.round(message.height)); + message.time !== undefined && (obj.time = message.time.toISOString()); + message.lastBlockId !== undefined + && (obj.lastBlockId = message.lastBlockId ? BlockID.toJSON(message.lastBlockId) : undefined); + message.lastCommitHash !== undefined + && (obj.lastCommitHash = base64FromBytes( + message.lastCommitHash !== undefined ? message.lastCommitHash : new Uint8Array(), + )); + message.dataHash !== undefined + && (obj.dataHash = base64FromBytes(message.dataHash !== undefined ? message.dataHash : new Uint8Array())); + message.validatorsHash !== undefined + && (obj.validatorsHash = base64FromBytes( + message.validatorsHash !== undefined ? 
message.validatorsHash : new Uint8Array(), + )); + message.nextValidatorsHash !== undefined + && (obj.nextValidatorsHash = base64FromBytes( + message.nextValidatorsHash !== undefined ? message.nextValidatorsHash : new Uint8Array(), + )); + message.consensusHash !== undefined + && (obj.consensusHash = base64FromBytes( + message.consensusHash !== undefined ? message.consensusHash : new Uint8Array(), + )); + message.appHash !== undefined + && (obj.appHash = base64FromBytes(message.appHash !== undefined ? message.appHash : new Uint8Array())); + message.lastResultsHash !== undefined + && (obj.lastResultsHash = base64FromBytes( + message.lastResultsHash !== undefined ? message.lastResultsHash : new Uint8Array(), + )); + message.evidenceHash !== undefined + && (obj.evidenceHash = base64FromBytes( + message.evidenceHash !== undefined ? message.evidenceHash : new Uint8Array(), + )); + message.proposerAddress !== undefined + && (obj.proposerAddress = base64FromBytes( + message.proposerAddress !== undefined ? message.proposerAddress : new Uint8Array(), + )); + return obj; + }, + + fromPartial, I>>(object: I): Header { + const message = createBaseHeader(); + message.version = (object.version !== undefined && object.version !== null) + ? Consensus.fromPartial(object.version) + : undefined; + message.chainId = object.chainId ?? ""; + message.height = object.height ?? 0; + message.time = object.time ?? undefined; + message.lastBlockId = (object.lastBlockId !== undefined && object.lastBlockId !== null) + ? BlockID.fromPartial(object.lastBlockId) + : undefined; + message.lastCommitHash = object.lastCommitHash ?? new Uint8Array(); + message.dataHash = object.dataHash ?? new Uint8Array(); + message.validatorsHash = object.validatorsHash ?? new Uint8Array(); + message.nextValidatorsHash = object.nextValidatorsHash ?? new Uint8Array(); + message.consensusHash = object.consensusHash ?? new Uint8Array(); + message.appHash = object.appHash ?? 
new Uint8Array(); + message.lastResultsHash = object.lastResultsHash ?? new Uint8Array(); + message.evidenceHash = object.evidenceHash ?? new Uint8Array(); + message.proposerAddress = object.proposerAddress ?? new Uint8Array(); + return message; + }, +}; + +function createBaseData(): Data { + return { txs: [] }; +} + +export const Data = { + encode(message: Data, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.txs) { + writer.uint32(10).bytes(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Data { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseData(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.txs.push(reader.bytes()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Data { + return { txs: Array.isArray(object?.txs) ? object.txs.map((e: any) => bytesFromBase64(e)) : [] }; + }, + + toJSON(message: Data): unknown { + const obj: any = {}; + if (message.txs) { + obj.txs = message.txs.map((e) => base64FromBytes(e !== undefined ? 
e : new Uint8Array())); + } else { + obj.txs = []; + } + return obj; + }, + + fromPartial, I>>(object: I): Data { + const message = createBaseData(); + message.txs = object.txs?.map((e) => e) || []; + return message; + }, +}; + +function createBaseVote(): Vote { + return { + type: 0, + height: 0, + round: 0, + blockId: undefined, + timestamp: undefined, + validatorAddress: new Uint8Array(), + validatorIndex: 0, + signature: new Uint8Array(), + }; +} + +export const Vote = { + encode(message: Vote, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.type !== 0) { + writer.uint32(8).int32(message.type); + } + if (message.height !== 0) { + writer.uint32(16).int64(message.height); + } + if (message.round !== 0) { + writer.uint32(24).int32(message.round); + } + if (message.blockId !== undefined) { + BlockID.encode(message.blockId, writer.uint32(34).fork()).ldelim(); + } + if (message.timestamp !== undefined) { + Timestamp.encode(toTimestamp(message.timestamp), writer.uint32(42).fork()).ldelim(); + } + if (message.validatorAddress.length !== 0) { + writer.uint32(50).bytes(message.validatorAddress); + } + if (message.validatorIndex !== 0) { + writer.uint32(56).int32(message.validatorIndex); + } + if (message.signature.length !== 0) { + writer.uint32(66).bytes(message.signature); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Vote { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseVote(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.type = reader.int32() as any; + break; + case 2: + message.height = longToNumber(reader.int64() as Long); + break; + case 3: + message.round = reader.int32(); + break; + case 4: + message.blockId = BlockID.decode(reader, reader.uint32()); + break; + case 5: + message.timestamp = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + case 6: + message.validatorAddress = reader.bytes(); + break; + case 7: + message.validatorIndex = reader.int32(); + break; + case 8: + message.signature = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Vote { + return { + type: isSet(object.type) ? signedMsgTypeFromJSON(object.type) : 0, + height: isSet(object.height) ? Number(object.height) : 0, + round: isSet(object.round) ? Number(object.round) : 0, + blockId: isSet(object.blockId) ? BlockID.fromJSON(object.blockId) : undefined, + timestamp: isSet(object.timestamp) ? fromJsonTimestamp(object.timestamp) : undefined, + validatorAddress: isSet(object.validatorAddress) ? bytesFromBase64(object.validatorAddress) : new Uint8Array(), + validatorIndex: isSet(object.validatorIndex) ? Number(object.validatorIndex) : 0, + signature: isSet(object.signature) ? bytesFromBase64(object.signature) : new Uint8Array(), + }; + }, + + toJSON(message: Vote): unknown { + const obj: any = {}; + message.type !== undefined && (obj.type = signedMsgTypeToJSON(message.type)); + message.height !== undefined && (obj.height = Math.round(message.height)); + message.round !== undefined && (obj.round = Math.round(message.round)); + message.blockId !== undefined && (obj.blockId = message.blockId ? 
BlockID.toJSON(message.blockId) : undefined); + message.timestamp !== undefined && (obj.timestamp = message.timestamp.toISOString()); + message.validatorAddress !== undefined + && (obj.validatorAddress = base64FromBytes( + message.validatorAddress !== undefined ? message.validatorAddress : new Uint8Array(), + )); + message.validatorIndex !== undefined && (obj.validatorIndex = Math.round(message.validatorIndex)); + message.signature !== undefined + && (obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): Vote { + const message = createBaseVote(); + message.type = object.type ?? 0; + message.height = object.height ?? 0; + message.round = object.round ?? 0; + message.blockId = (object.blockId !== undefined && object.blockId !== null) + ? BlockID.fromPartial(object.blockId) + : undefined; + message.timestamp = object.timestamp ?? undefined; + message.validatorAddress = object.validatorAddress ?? new Uint8Array(); + message.validatorIndex = object.validatorIndex ?? 0; + message.signature = object.signature ?? new Uint8Array(); + return message; + }, +}; + +function createBaseCommit(): Commit { + return { height: 0, round: 0, blockId: undefined, signatures: [] }; +} + +export const Commit = { + encode(message: Commit, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.height !== 0) { + writer.uint32(8).int64(message.height); + } + if (message.round !== 0) { + writer.uint32(16).int32(message.round); + } + if (message.blockId !== undefined) { + BlockID.encode(message.blockId, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.signatures) { + CommitSig.encode(v!, writer.uint32(34).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Commit { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseCommit(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.height = longToNumber(reader.int64() as Long); + break; + case 2: + message.round = reader.int32(); + break; + case 3: + message.blockId = BlockID.decode(reader, reader.uint32()); + break; + case 4: + message.signatures.push(CommitSig.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Commit { + return { + height: isSet(object.height) ? Number(object.height) : 0, + round: isSet(object.round) ? Number(object.round) : 0, + blockId: isSet(object.blockId) ? BlockID.fromJSON(object.blockId) : undefined, + signatures: Array.isArray(object?.signatures) ? object.signatures.map((e: any) => CommitSig.fromJSON(e)) : [], + }; + }, + + toJSON(message: Commit): unknown { + const obj: any = {}; + message.height !== undefined && (obj.height = Math.round(message.height)); + message.round !== undefined && (obj.round = Math.round(message.round)); + message.blockId !== undefined && (obj.blockId = message.blockId ? BlockID.toJSON(message.blockId) : undefined); + if (message.signatures) { + obj.signatures = message.signatures.map((e) => e ? CommitSig.toJSON(e) : undefined); + } else { + obj.signatures = []; + } + return obj; + }, + + fromPartial, I>>(object: I): Commit { + const message = createBaseCommit(); + message.height = object.height ?? 0; + message.round = object.round ?? 0; + message.blockId = (object.blockId !== undefined && object.blockId !== null) + ? 
BlockID.fromPartial(object.blockId) + : undefined; + message.signatures = object.signatures?.map((e) => CommitSig.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseCommitSig(): CommitSig { + return { blockIdFlag: 0, validatorAddress: new Uint8Array(), timestamp: undefined, signature: new Uint8Array() }; +} + +export const CommitSig = { + encode(message: CommitSig, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.blockIdFlag !== 0) { + writer.uint32(8).int32(message.blockIdFlag); + } + if (message.validatorAddress.length !== 0) { + writer.uint32(18).bytes(message.validatorAddress); + } + if (message.timestamp !== undefined) { + Timestamp.encode(toTimestamp(message.timestamp), writer.uint32(26).fork()).ldelim(); + } + if (message.signature.length !== 0) { + writer.uint32(34).bytes(message.signature); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CommitSig { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCommitSig(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.blockIdFlag = reader.int32() as any; + break; + case 2: + message.validatorAddress = reader.bytes(); + break; + case 3: + message.timestamp = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + case 4: + message.signature = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): CommitSig { + return { + blockIdFlag: isSet(object.blockIdFlag) ? blockIDFlagFromJSON(object.blockIdFlag) : 0, + validatorAddress: isSet(object.validatorAddress) ? bytesFromBase64(object.validatorAddress) : new Uint8Array(), + timestamp: isSet(object.timestamp) ? fromJsonTimestamp(object.timestamp) : undefined, + signature: isSet(object.signature) ? 
bytesFromBase64(object.signature) : new Uint8Array(), + }; + }, + + toJSON(message: CommitSig): unknown { + const obj: any = {}; + message.blockIdFlag !== undefined && (obj.blockIdFlag = blockIDFlagToJSON(message.blockIdFlag)); + message.validatorAddress !== undefined + && (obj.validatorAddress = base64FromBytes( + message.validatorAddress !== undefined ? message.validatorAddress : new Uint8Array(), + )); + message.timestamp !== undefined && (obj.timestamp = message.timestamp.toISOString()); + message.signature !== undefined + && (obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): CommitSig { + const message = createBaseCommitSig(); + message.blockIdFlag = object.blockIdFlag ?? 0; + message.validatorAddress = object.validatorAddress ?? new Uint8Array(); + message.timestamp = object.timestamp ?? undefined; + message.signature = object.signature ?? new Uint8Array(); + return message; + }, +}; + +function createBaseProposal(): Proposal { + return { + type: 0, + height: 0, + round: 0, + polRound: 0, + blockId: undefined, + timestamp: undefined, + signature: new Uint8Array(), + }; +} + +export const Proposal = { + encode(message: Proposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.type !== 0) { + writer.uint32(8).int32(message.type); + } + if (message.height !== 0) { + writer.uint32(16).int64(message.height); + } + if (message.round !== 0) { + writer.uint32(24).int32(message.round); + } + if (message.polRound !== 0) { + writer.uint32(32).int32(message.polRound); + } + if (message.blockId !== undefined) { + BlockID.encode(message.blockId, writer.uint32(42).fork()).ldelim(); + } + if (message.timestamp !== undefined) { + Timestamp.encode(toTimestamp(message.timestamp), writer.uint32(50).fork()).ldelim(); + } + if (message.signature.length !== 0) { + writer.uint32(58).bytes(message.signature); + } + return writer; + }, + + decode(input: 
_m0.Reader | Uint8Array, length?: number): Proposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseProposal(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.type = reader.int32() as any; + break; + case 2: + message.height = longToNumber(reader.int64() as Long); + break; + case 3: + message.round = reader.int32(); + break; + case 4: + message.polRound = reader.int32(); + break; + case 5: + message.blockId = BlockID.decode(reader, reader.uint32()); + break; + case 6: + message.timestamp = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + case 7: + message.signature = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Proposal { + return { + type: isSet(object.type) ? signedMsgTypeFromJSON(object.type) : 0, + height: isSet(object.height) ? Number(object.height) : 0, + round: isSet(object.round) ? Number(object.round) : 0, + polRound: isSet(object.polRound) ? Number(object.polRound) : 0, + blockId: isSet(object.blockId) ? BlockID.fromJSON(object.blockId) : undefined, + timestamp: isSet(object.timestamp) ? fromJsonTimestamp(object.timestamp) : undefined, + signature: isSet(object.signature) ? bytesFromBase64(object.signature) : new Uint8Array(), + }; + }, + + toJSON(message: Proposal): unknown { + const obj: any = {}; + message.type !== undefined && (obj.type = signedMsgTypeToJSON(message.type)); + message.height !== undefined && (obj.height = Math.round(message.height)); + message.round !== undefined && (obj.round = Math.round(message.round)); + message.polRound !== undefined && (obj.polRound = Math.round(message.polRound)); + message.blockId !== undefined && (obj.blockId = message.blockId ? 
BlockID.toJSON(message.blockId) : undefined); + message.timestamp !== undefined && (obj.timestamp = message.timestamp.toISOString()); + message.signature !== undefined + && (obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): Proposal { + const message = createBaseProposal(); + message.type = object.type ?? 0; + message.height = object.height ?? 0; + message.round = object.round ?? 0; + message.polRound = object.polRound ?? 0; + message.blockId = (object.blockId !== undefined && object.blockId !== null) + ? BlockID.fromPartial(object.blockId) + : undefined; + message.timestamp = object.timestamp ?? undefined; + message.signature = object.signature ?? new Uint8Array(); + return message; + }, +}; + +function createBaseSignedHeader(): SignedHeader { + return { header: undefined, commit: undefined }; +} + +export const SignedHeader = { + encode(message: SignedHeader, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.header !== undefined) { + Header.encode(message.header, writer.uint32(10).fork()).ldelim(); + } + if (message.commit !== undefined) { + Commit.encode(message.commit, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SignedHeader { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSignedHeader(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.header = Header.decode(reader, reader.uint32()); + break; + case 2: + message.commit = Commit.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SignedHeader { + return { + header: isSet(object.header) ? 
Header.fromJSON(object.header) : undefined, + commit: isSet(object.commit) ? Commit.fromJSON(object.commit) : undefined, + }; + }, + + toJSON(message: SignedHeader): unknown { + const obj: any = {}; + message.header !== undefined && (obj.header = message.header ? Header.toJSON(message.header) : undefined); + message.commit !== undefined && (obj.commit = message.commit ? Commit.toJSON(message.commit) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): SignedHeader { + const message = createBaseSignedHeader(); + message.header = (object.header !== undefined && object.header !== null) + ? Header.fromPartial(object.header) + : undefined; + message.commit = (object.commit !== undefined && object.commit !== null) + ? Commit.fromPartial(object.commit) + : undefined; + return message; + }, +}; + +function createBaseLightBlock(): LightBlock { + return { signedHeader: undefined, validatorSet: undefined }; +} + +export const LightBlock = { + encode(message: LightBlock, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signedHeader !== undefined) { + SignedHeader.encode(message.signedHeader, writer.uint32(10).fork()).ldelim(); + } + if (message.validatorSet !== undefined) { + ValidatorSet.encode(message.validatorSet, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): LightBlock { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseLightBlock(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signedHeader = SignedHeader.decode(reader, reader.uint32()); + break; + case 2: + message.validatorSet = ValidatorSet.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): LightBlock { + return { + signedHeader: isSet(object.signedHeader) ? SignedHeader.fromJSON(object.signedHeader) : undefined, + validatorSet: isSet(object.validatorSet) ? ValidatorSet.fromJSON(object.validatorSet) : undefined, + }; + }, + + toJSON(message: LightBlock): unknown { + const obj: any = {}; + message.signedHeader !== undefined + && (obj.signedHeader = message.signedHeader ? SignedHeader.toJSON(message.signedHeader) : undefined); + message.validatorSet !== undefined + && (obj.validatorSet = message.validatorSet ? ValidatorSet.toJSON(message.validatorSet) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): LightBlock { + const message = createBaseLightBlock(); + message.signedHeader = (object.signedHeader !== undefined && object.signedHeader !== null) + ? SignedHeader.fromPartial(object.signedHeader) + : undefined; + message.validatorSet = (object.validatorSet !== undefined && object.validatorSet !== null) + ? 
ValidatorSet.fromPartial(object.validatorSet) + : undefined; + return message; + }, +}; + +function createBaseBlockMeta(): BlockMeta { + return { blockId: undefined, blockSize: 0, header: undefined, numTxs: 0 }; +} + +export const BlockMeta = { + encode(message: BlockMeta, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.blockId !== undefined) { + BlockID.encode(message.blockId, writer.uint32(10).fork()).ldelim(); + } + if (message.blockSize !== 0) { + writer.uint32(16).int64(message.blockSize); + } + if (message.header !== undefined) { + Header.encode(message.header, writer.uint32(26).fork()).ldelim(); + } + if (message.numTxs !== 0) { + writer.uint32(32).int64(message.numTxs); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BlockMeta { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBlockMeta(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.blockId = BlockID.decode(reader, reader.uint32()); + break; + case 2: + message.blockSize = longToNumber(reader.int64() as Long); + break; + case 3: + message.header = Header.decode(reader, reader.uint32()); + break; + case 4: + message.numTxs = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): BlockMeta { + return { + blockId: isSet(object.blockId) ? BlockID.fromJSON(object.blockId) : undefined, + blockSize: isSet(object.blockSize) ? Number(object.blockSize) : 0, + header: isSet(object.header) ? Header.fromJSON(object.header) : undefined, + numTxs: isSet(object.numTxs) ? Number(object.numTxs) : 0, + }; + }, + + toJSON(message: BlockMeta): unknown { + const obj: any = {}; + message.blockId !== undefined && (obj.blockId = message.blockId ? 
BlockID.toJSON(message.blockId) : undefined); + message.blockSize !== undefined && (obj.blockSize = Math.round(message.blockSize)); + message.header !== undefined && (obj.header = message.header ? Header.toJSON(message.header) : undefined); + message.numTxs !== undefined && (obj.numTxs = Math.round(message.numTxs)); + return obj; + }, + + fromPartial, I>>(object: I): BlockMeta { + const message = createBaseBlockMeta(); + message.blockId = (object.blockId !== undefined && object.blockId !== null) + ? BlockID.fromPartial(object.blockId) + : undefined; + message.blockSize = object.blockSize ?? 0; + message.header = (object.header !== undefined && object.header !== null) + ? Header.fromPartial(object.header) + : undefined; + message.numTxs = object.numTxs ?? 0; + return message; + }, +}; + +function createBaseTxProof(): TxProof { + return { rootHash: new Uint8Array(), data: new Uint8Array(), proof: undefined }; +} + +export const TxProof = { + encode(message: TxProof, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.rootHash.length !== 0) { + writer.uint32(10).bytes(message.rootHash); + } + if (message.data.length !== 0) { + writer.uint32(18).bytes(message.data); + } + if (message.proof !== undefined) { + Proof.encode(message.proof, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TxProof { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseTxProof(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rootHash = reader.bytes(); + break; + case 2: + message.data = reader.bytes(); + break; + case 3: + message.proof = Proof.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): TxProof { + return { + rootHash: isSet(object.rootHash) ? bytesFromBase64(object.rootHash) : new Uint8Array(), + data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(), + proof: isSet(object.proof) ? Proof.fromJSON(object.proof) : undefined, + }; + }, + + toJSON(message: TxProof): unknown { + const obj: any = {}; + message.rootHash !== undefined + && (obj.rootHash = base64FromBytes(message.rootHash !== undefined ? message.rootHash : new Uint8Array())); + message.data !== undefined + && (obj.data = base64FromBytes(message.data !== undefined ? message.data : new Uint8Array())); + message.proof !== undefined && (obj.proof = message.proof ? Proof.toJSON(message.proof) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): TxProof { + const message = createBaseTxProof(); + message.rootHash = object.rootHash ?? new Uint8Array(); + message.data = object.data ?? new Uint8Array(); + message.proof = (object.proof !== undefined && object.proof !== null) ? 
Proof.fromPartial(object.proof) : undefined; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function toTimestamp(date: Date): Timestamp { + const seconds = date.getTime() / 1_000; + const nanos = (date.getTime() % 1_000) * 1_000_000; + return { seconds, nanos }; +} + +function fromTimestamp(t: Timestamp): Date { + let millis = t.seconds * 1_000; + millis += t.nanos / 1_000_000; + return new Date(millis); +} + +function fromJsonTimestamp(o: any): Date { + if (o instanceof Date) { + return o; + } else if (typeof o === "string") { + return new Date(o); + } else { + return fromTimestamp(Timestamp.fromJSON(o)); + } +} + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/types/validator.ts b/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/types/validator.ts new file mode 100644 index 000000000..690845836 --- /dev/null +++ b/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/types/validator.ts @@ -0,0 +1,308 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { PublicKey } from "../crypto/keys"; + +export const protobufPackage = "tendermint.types"; + +export interface ValidatorSet { + validators: Validator[]; + proposer: Validator | undefined; + totalVotingPower: number; +} + +export interface Validator { + address: Uint8Array; + pubKey: PublicKey | undefined; + votingPower: number; + proposerPriority: number; +} + +export interface SimpleValidator { + pubKey: PublicKey | undefined; + votingPower: number; +} + +function createBaseValidatorSet(): ValidatorSet { + return { validators: [], proposer: undefined, totalVotingPower: 0 }; +} + +export 
const ValidatorSet = { + encode(message: ValidatorSet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.validators) { + Validator.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.proposer !== undefined) { + Validator.encode(message.proposer, writer.uint32(18).fork()).ldelim(); + } + if (message.totalVotingPower !== 0) { + writer.uint32(24).int64(message.totalVotingPower); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorSet { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidatorSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.validators.push(Validator.decode(reader, reader.uint32())); + break; + case 2: + message.proposer = Validator.decode(reader, reader.uint32()); + break; + case 3: + message.totalVotingPower = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ValidatorSet { + return { + validators: Array.isArray(object?.validators) ? object.validators.map((e: any) => Validator.fromJSON(e)) : [], + proposer: isSet(object.proposer) ? Validator.fromJSON(object.proposer) : undefined, + totalVotingPower: isSet(object.totalVotingPower) ? Number(object.totalVotingPower) : 0, + }; + }, + + toJSON(message: ValidatorSet): unknown { + const obj: any = {}; + if (message.validators) { + obj.validators = message.validators.map((e) => e ? Validator.toJSON(e) : undefined); + } else { + obj.validators = []; + } + message.proposer !== undefined + && (obj.proposer = message.proposer ? 
Validator.toJSON(message.proposer) : undefined); + message.totalVotingPower !== undefined && (obj.totalVotingPower = Math.round(message.totalVotingPower)); + return obj; + }, + + fromPartial, I>>(object: I): ValidatorSet { + const message = createBaseValidatorSet(); + message.validators = object.validators?.map((e) => Validator.fromPartial(e)) || []; + message.proposer = (object.proposer !== undefined && object.proposer !== null) + ? Validator.fromPartial(object.proposer) + : undefined; + message.totalVotingPower = object.totalVotingPower ?? 0; + return message; + }, +}; + +function createBaseValidator(): Validator { + return { address: new Uint8Array(), pubKey: undefined, votingPower: 0, proposerPriority: 0 }; +} + +export const Validator = { + encode(message: Validator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address.length !== 0) { + writer.uint32(10).bytes(message.address); + } + if (message.pubKey !== undefined) { + PublicKey.encode(message.pubKey, writer.uint32(18).fork()).ldelim(); + } + if (message.votingPower !== 0) { + writer.uint32(24).int64(message.votingPower); + } + if (message.proposerPriority !== 0) { + writer.uint32(32).int64(message.proposerPriority); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Validator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseValidator(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.bytes(); + break; + case 2: + message.pubKey = PublicKey.decode(reader, reader.uint32()); + break; + case 3: + message.votingPower = longToNumber(reader.int64() as Long); + break; + case 4: + message.proposerPriority = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Validator { + return { + address: isSet(object.address) ? bytesFromBase64(object.address) : new Uint8Array(), + pubKey: isSet(object.pubKey) ? PublicKey.fromJSON(object.pubKey) : undefined, + votingPower: isSet(object.votingPower) ? Number(object.votingPower) : 0, + proposerPriority: isSet(object.proposerPriority) ? Number(object.proposerPriority) : 0, + }; + }, + + toJSON(message: Validator): unknown { + const obj: any = {}; + message.address !== undefined + && (obj.address = base64FromBytes(message.address !== undefined ? message.address : new Uint8Array())); + message.pubKey !== undefined && (obj.pubKey = message.pubKey ? PublicKey.toJSON(message.pubKey) : undefined); + message.votingPower !== undefined && (obj.votingPower = Math.round(message.votingPower)); + message.proposerPriority !== undefined && (obj.proposerPriority = Math.round(message.proposerPriority)); + return obj; + }, + + fromPartial, I>>(object: I): Validator { + const message = createBaseValidator(); + message.address = object.address ?? new Uint8Array(); + message.pubKey = (object.pubKey !== undefined && object.pubKey !== null) + ? PublicKey.fromPartial(object.pubKey) + : undefined; + message.votingPower = object.votingPower ?? 0; + message.proposerPriority = object.proposerPriority ?? 
0; + return message; + }, +}; + +function createBaseSimpleValidator(): SimpleValidator { + return { pubKey: undefined, votingPower: 0 }; +} + +export const SimpleValidator = { + encode(message: SimpleValidator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pubKey !== undefined) { + PublicKey.encode(message.pubKey, writer.uint32(10).fork()).ldelim(); + } + if (message.votingPower !== 0) { + writer.uint32(16).int64(message.votingPower); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SimpleValidator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSimpleValidator(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pubKey = PublicKey.decode(reader, reader.uint32()); + break; + case 2: + message.votingPower = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SimpleValidator { + return { + pubKey: isSet(object.pubKey) ? PublicKey.fromJSON(object.pubKey) : undefined, + votingPower: isSet(object.votingPower) ? Number(object.votingPower) : 0, + }; + }, + + toJSON(message: SimpleValidator): unknown { + const obj: any = {}; + message.pubKey !== undefined && (obj.pubKey = message.pubKey ? PublicKey.toJSON(message.pubKey) : undefined); + message.votingPower !== undefined && (obj.votingPower = Math.round(message.votingPower)); + return obj; + }, + + fromPartial, I>>(object: I): SimpleValidator { + const message = createBaseSimpleValidator(); + message.pubKey = (object.pubKey !== undefined && object.pubKey !== null) + ? PublicKey.fromPartial(object.pubKey) + : undefined; + message.votingPower = object.votingPower ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/version/types.ts b/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/version/types.ts new file mode 100644 index 000000000..f326bec12 --- /dev/null +++ b/ts-client/cosmos.base.tendermint.v1beta1/types/tendermint/version/types.ts @@ -0,0 +1,184 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "tendermint.version"; + +/** + * App includes the protocol and software version for the application. + * This information is included in ResponseInfo. The App.Protocol can be + * updated in ResponseEndBlock. + */ +export interface App { + protocol: number; + software: string; +} + +/** + * Consensus captures the consensus rules for processing a block in the blockchain, + * including all blockchain data structures and the rules of the application's + * state transition machine. + */ +export interface Consensus { + block: number; + app: number; +} + +function createBaseApp(): App { + return { protocol: 0, software: "" }; +} + +export const App = { + encode(message: App, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.protocol !== 0) { + writer.uint32(8).uint64(message.protocol); + } + if (message.software !== "") { + writer.uint32(18).string(message.software); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): App { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseApp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.protocol = longToNumber(reader.uint64() as Long); + break; + case 2: + message.software = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): App { + return { + protocol: isSet(object.protocol) ? Number(object.protocol) : 0, + software: isSet(object.software) ? String(object.software) : "", + }; + }, + + toJSON(message: App): unknown { + const obj: any = {}; + message.protocol !== undefined && (obj.protocol = Math.round(message.protocol)); + message.software !== undefined && (obj.software = message.software); + return obj; + }, + + fromPartial, I>>(object: I): App { + const message = createBaseApp(); + message.protocol = object.protocol ?? 0; + message.software = object.software ?? ""; + return message; + }, +}; + +function createBaseConsensus(): Consensus { + return { block: 0, app: 0 }; +} + +export const Consensus = { + encode(message: Consensus, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.block !== 0) { + writer.uint32(8).uint64(message.block); + } + if (message.app !== 0) { + writer.uint32(16).uint64(message.app); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Consensus { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseConsensus(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.block = longToNumber(reader.uint64() as Long); + break; + case 2: + message.app = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Consensus { + return { block: isSet(object.block) ? 
Number(object.block) : 0, app: isSet(object.app) ? Number(object.app) : 0 }; + }, + + toJSON(message: Consensus): unknown { + const obj: any = {}; + message.block !== undefined && (obj.block = Math.round(message.block)); + message.app !== undefined && (obj.app = Math.round(message.app)); + return obj; + }, + + fromPartial, I>>(object: I): Consensus { + const message = createBaseConsensus(); + message.block = object.block ?? 0; + message.app = object.app ?? 0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.consensus.v1/api.swagger.yml b/ts-client/cosmos.consensus.v1/api.swagger.yml new file mode 100644 index 000000000..6a458686e --- /dev/null +++ b/ts-client/cosmos.consensus.v1/api.swagger.yml @@ -0,0 +1,204 @@ +swagger: '2.0' +info: + title: HTTP API Console cosmos.consensus.v1 + name: '' + description: '' +paths: + /cosmos/consensus/v1/params: + get: + operationId: Params + responses: + '200': + description: A successful response. + schema: + type: object + properties: + params: + type: object + properties: + block: + type: object + properties: + max_bytes: + type: string + format: int64 + max_gas: + type: string + format: int64 + evidence: + type: object + properties: + max_age_num_blocks: + type: string + format: int64 + max_age_duration: + type: string + max_bytes: + type: string + format: int64 + validator: + type: object + properties: + pub_key_types: + type: array + items: + type: string + version: + type: object + properties: + app: + type: string + format: uint64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + tags: + - Query +definitions: + Any: + type: object + properties: + '@type': + type: string + additionalProperties: {} + BlockParams: + type: object + properties: + max_bytes: + type: string + format: int64 + max_gas: + type: string + format: int64 + ConsensusParams: + type: object + properties: + block: + type: object + properties: + max_bytes: + type: string + format: int64 + max_gas: + type: string + format: int64 + evidence: + type: object + properties: + max_age_num_blocks: + type: string + format: int64 + max_age_duration: + type: string + max_bytes: + type: string + format: int64 + validator: + type: object + properties: + pub_key_types: + type: array + items: + type: string + version: + type: object + properties: + app: + type: string + format: uint64 + EvidenceParams: + type: object + properties: + max_age_num_blocks: + type: string + format: int64 + max_age_duration: + type: string + max_bytes: + type: string + format: int64 + QueryParamsResponse: + type: object + properties: + params: + type: object + properties: + block: + type: object + properties: + max_bytes: + type: string + format: int64 + max_gas: + type: string + format: int64 + evidence: + type: object + properties: + max_age_num_blocks: + type: string + format: int64 + max_age_duration: + type: string + max_bytes: + type: string + format: int64 + validator: + type: object + properties: + pub_key_types: + type: array + items: + type: string + version: + type: object + properties: + app: + type: string + format: uint64 + Status: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + ValidatorParams: + type: object + 
properties: + pub_key_types: + type: array + items: + type: string + VersionParams: + type: object + properties: + app: + type: string + format: uint64 + MsgUpdateParamsResponse: + type: object diff --git a/ts-client/cosmos.consensus.v1/index.ts b/ts-client/cosmos.consensus.v1/index.ts new file mode 100755 index 000000000..c9dfa159e --- /dev/null +++ b/ts-client/cosmos.consensus.v1/index.ts @@ -0,0 +1,6 @@ +import Module from './module'; +import { txClient, queryClient, registry } from './module'; +import { msgTypes } from './registry'; + +export * from "./types"; +export { Module, msgTypes, txClient, queryClient, registry }; diff --git a/ts-client/cosmos.consensus.v1/module.ts b/ts-client/cosmos.consensus.v1/module.ts new file mode 100755 index 000000000..73e5e8eb4 --- /dev/null +++ b/ts-client/cosmos.consensus.v1/module.ts @@ -0,0 +1,129 @@ +// Generated by Ignite ignite.com/cli + +import { StdFee } from "@cosmjs/launchpad"; +import { SigningStargateClient, DeliverTxResponse } from "@cosmjs/stargate"; +import { EncodeObject, GeneratedType, OfflineSigner, Registry } from "@cosmjs/proto-signing"; +import { msgTypes } from './registry'; +import { IgniteClient } from "../client" +import { MissingWalletError } from "../helpers" +import { Api } from "./rest"; +import { MsgUpdateParams } from "./types/cosmos/consensus/v1/tx"; + + +export { MsgUpdateParams }; + +type sendMsgUpdateParamsParams = { + value: MsgUpdateParams, + fee?: StdFee, + memo?: string +}; + + +type msgUpdateParamsParams = { + value: MsgUpdateParams, +}; + + +export const registry = new Registry(msgTypes); + +type Field = { + name: string; + type: unknown; +} +function getStructure(template) { + const structure: {fields: Field[]} = { fields: [] } + for (let [key, value] of Object.entries(template)) { + let field = { name: key, type: typeof value } + structure.fields.push(field) + } + return structure +} +const defaultFee = { + amount: [], + gas: "200000", +}; + +interface TxClientOptions { + addr: 
string + prefix: string + signer?: OfflineSigner +} + +export const txClient = ({ signer, prefix, addr }: TxClientOptions = { addr: "http://localhost:26657", prefix: "cosmos" }) => { + + return { + + async sendMsgUpdateParams({ value, fee, memo }: sendMsgUpdateParamsParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgUpdateParams: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgUpdateParams({ value: MsgUpdateParams.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgUpdateParams: Could not broadcast Tx: '+ e.message) + } + }, + + + msgUpdateParams({ value }: msgUpdateParamsParams): EncodeObject { + try { + return { typeUrl: "/cosmos.consensus.v1.MsgUpdateParams", value: MsgUpdateParams.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgUpdateParams: Could not create message: ' + e.message) + } + }, + + } +}; + +interface QueryClientOptions { + addr: string +} + +export const queryClient = ({ addr: addr }: QueryClientOptions = { addr: "http://localhost:1317" }) => { + return new Api({ baseURL: addr }); +}; + +class SDKModule { + public query: ReturnType; + public tx: ReturnType; + public structure: Record; + public registry: Array<[string, GeneratedType]> = []; + + constructor(client: IgniteClient) { + + this.query = queryClient({ addr: client.env.apiURL }); + this.updateTX(client); + this.structure = { + + }; + client.on('signer-changed',(signer) => { + this.updateTX(client); + }) + } + updateTX(client: IgniteClient) { + const methods = txClient({ + signer: client.signer, + addr: client.env.rpcURL, + prefix: client.env.prefix ?? 
"cosmos", + }) + + this.tx = methods; + for (let m in methods) { + this.tx[m] = methods[m].bind(this.tx); + } + } +}; + +const Module = (test: IgniteClient) => { + return { + module: { + CosmosConsensusV1: new SDKModule(test) + }, + registry: msgTypes + } +} +export default Module; \ No newline at end of file diff --git a/ts-client/cosmos.consensus.v1/registry.ts b/ts-client/cosmos.consensus.v1/registry.ts new file mode 100755 index 000000000..306a6aeca --- /dev/null +++ b/ts-client/cosmos.consensus.v1/registry.ts @@ -0,0 +1,9 @@ +import { GeneratedType } from "@cosmjs/proto-signing"; +import { MsgUpdateParams } from "./types/cosmos/consensus/v1/tx"; + +const msgTypes: Array<[string, GeneratedType]> = [ + ["/cosmos.consensus.v1.MsgUpdateParams", MsgUpdateParams], + +]; + +export { msgTypes } \ No newline at end of file diff --git a/ts-client/cosmos.consensus.v1/rest.ts b/ts-client/cosmos.consensus.v1/rest.ts new file mode 100644 index 000000000..15b5f6ab8 --- /dev/null +++ b/ts-client/cosmos.consensus.v1/rest.ts @@ -0,0 +1,268 @@ +/* eslint-disable */ +/* tslint:disable */ +/* + * --------------------------------------------------------------- + * ## THIS FILE WAS GENERATED VIA SWAGGER-TYPESCRIPT-API ## + * ## ## + * ## AUTHOR: acacode ## + * ## SOURCE: https://github.com/acacode/swagger-typescript-api ## + * --------------------------------------------------------------- + */ + +export interface ProtobufAny { + "@type"?: string; +} + +export interface RpcStatus { + /** @format int32 */ + code?: number; + message?: string; + details?: ProtobufAny[]; +} + +/** + * BlockParams contains limits on the block size. + */ +export interface TypesBlockParams { + /** + * Max block size, in bytes. + * Note: must be greater than 0 + * @format int64 + */ + max_bytes?: string; + + /** + * Max gas per block. 
+ * Note: must be greater or equal to -1 + * @format int64 + */ + max_gas?: string; +} + +/** +* ConsensusParams contains consensus critical parameters that determine the +validity of blocks. +*/ +export interface TypesConsensusParams { + /** BlockParams contains limits on the block size. */ + block?: TypesBlockParams; + + /** EvidenceParams determine how we handle evidence of malfeasance. */ + evidence?: TypesEvidenceParams; + + /** + * ValidatorParams restrict the public key types validators can use. + * NOTE: uses ABCI pubkey naming, not Amino names. + */ + validator?: TypesValidatorParams; + + /** VersionParams contains the ABCI application version. */ + version?: TypesVersionParams; +} + +/** + * EvidenceParams determine how we handle evidence of malfeasance. + */ +export interface TypesEvidenceParams { + /** + * Max age of evidence, in blocks. + * + * The basic formula for calculating this is: MaxAgeDuration / {average block + * time}. + * @format int64 + */ + max_age_num_blocks?: string; + + /** + * Max age of evidence, in time. + * + * It should correspond with an app's "unbonding period" or other similar + * mechanism for handling [Nothing-At-Stake + * attacks](https://github.com/ethereum/wiki/wiki/Proof-of-Stake-FAQ#what-is-the-nothing-at-stake-problem-and-how-can-it-be-fixed). + */ + max_age_duration?: string; + + /** + * This sets the maximum size of total evidence in bytes that can be committed in a single block. + * and should fall comfortably under the max block bytes. + * Default is 1048576 or 1MB + * @format int64 + */ + max_bytes?: string; +} + +/** +* ValidatorParams restrict the public key types validators can use. +NOTE: uses ABCI pubkey naming, not Amino names. +*/ +export interface TypesValidatorParams { + pub_key_types?: string[]; +} + +/** + * VersionParams contains the ABCI application version. 
+ */ +export interface TypesVersionParams { + /** @format uint64 */ + app?: string; +} + +/** +* MsgUpdateParamsResponse defines the response structure for executing a +MsgUpdateParams message. +*/ +export type V1MsgUpdateParamsResponse = object; + +/** + * QueryParamsResponse defines the response type for querying x/consensus parameters. + */ +export interface V1QueryParamsResponse { + /** + * params are the tendermint consensus params stored in the consensus module. + * Please note that `params.version` is not populated in this response, it is + * tracked separately in the x/upgrade module. + */ + params?: TypesConsensusParams; +} + +import axios, { AxiosInstance, AxiosRequestConfig, AxiosResponse, ResponseType } from "axios"; + +export type QueryParamsType = Record; + +export interface FullRequestParams extends Omit { + /** set parameter to `true` for call `securityWorker` for this request */ + secure?: boolean; + /** request path */ + path: string; + /** content type of request body */ + type?: ContentType; + /** query params */ + query?: QueryParamsType; + /** format of response (i.e. 
response.json() -> format: "json") */ + format?: ResponseType; + /** request body */ + body?: unknown; +} + +export type RequestParams = Omit; + +export interface ApiConfig extends Omit { + securityWorker?: ( + securityData: SecurityDataType | null, + ) => Promise | AxiosRequestConfig | void; + secure?: boolean; + format?: ResponseType; +} + +export enum ContentType { + Json = "application/json", + FormData = "multipart/form-data", + UrlEncoded = "application/x-www-form-urlencoded", +} + +export class HttpClient { + public instance: AxiosInstance; + private securityData: SecurityDataType | null = null; + private securityWorker?: ApiConfig["securityWorker"]; + private secure?: boolean; + private format?: ResponseType; + + constructor({ securityWorker, secure, format, ...axiosConfig }: ApiConfig = {}) { + this.instance = axios.create({ ...axiosConfig, baseURL: axiosConfig.baseURL || "" }); + this.secure = secure; + this.format = format; + this.securityWorker = securityWorker; + } + + public setSecurityData = (data: SecurityDataType | null) => { + this.securityData = data; + }; + + private mergeRequestParams(params1: AxiosRequestConfig, params2?: AxiosRequestConfig): AxiosRequestConfig { + return { + ...this.instance.defaults, + ...params1, + ...(params2 || {}), + headers: { + ...(this.instance.defaults.headers || {}), + ...(params1.headers || {}), + ...((params2 && params2.headers) || {}), + }, + }; + } + + private createFormData(input: Record): FormData { + return Object.keys(input || {}).reduce((formData, key) => { + const property = input[key]; + formData.append( + key, + property instanceof Blob + ? property + : typeof property === "object" && property !== null + ? JSON.stringify(property) + : `${property}`, + ); + return formData; + }, new FormData()); + } + + public request = async ({ + secure, + path, + type, + query, + format, + body, + ...params + }: FullRequestParams): Promise> => { + const secureParams = + ((typeof secure === "boolean" ? 
secure : this.secure) && + this.securityWorker && + (await this.securityWorker(this.securityData))) || + {}; + const requestParams = this.mergeRequestParams(params, secureParams); + const responseFormat = (format && this.format) || void 0; + + if (type === ContentType.FormData && body && body !== null && typeof body === "object") { + requestParams.headers.common = { Accept: "*/*" }; + requestParams.headers.post = {}; + requestParams.headers.put = {}; + + body = this.createFormData(body as Record); + } + + return this.instance.request({ + ...requestParams, + headers: { + ...(type && type !== ContentType.FormData ? { "Content-Type": type } : {}), + ...(requestParams.headers || {}), + }, + params: query, + responseType: responseFormat, + data: body, + url: path, + }); + }; +} + +/** + * @title cosmos/consensus/v1/query.proto + * @version version not set + */ +export class Api extends HttpClient { + /** + * No description + * + * @tags Query + * @name QueryParams + * @summary Params queries the parameters of x/consensus_param module. 
+ * @request GET:/cosmos/consensus/v1/params + */ + queryParams = (params: RequestParams = {}) => + this.request({ + path: `/cosmos/consensus/v1/params`, + method: "GET", + format: "json", + ...params, + }); +} diff --git a/ts-client/cosmos.consensus.v1/types.ts b/ts-client/cosmos.consensus.v1/types.ts new file mode 100755 index 000000000..fbb084a7e --- /dev/null +++ b/ts-client/cosmos.consensus.v1/types.ts @@ -0,0 +1,5 @@ + + +export { + + } \ No newline at end of file diff --git a/ts-client/cosmos.consensus.v1/types/cosmos/consensus/v1/query.ts b/ts-client/cosmos.consensus.v1/types/cosmos/consensus/v1/query.ts new file mode 100644 index 000000000..f1ee02a77 --- /dev/null +++ b/ts-client/cosmos.consensus.v1/types/cosmos/consensus/v1/query.ts @@ -0,0 +1,147 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { ConsensusParams } from "../../../tendermint/types/params"; + +export const protobufPackage = "cosmos.consensus.v1"; + +/** Since: cosmos-sdk 0.47 */ + +/** QueryParamsRequest defines the request type for querying x/consensus parameters. */ +export interface QueryParamsRequest { +} + +/** QueryParamsResponse defines the response type for querying x/consensus parameters. */ +export interface QueryParamsResponse { + /** + * params are the tendermint consensus params stored in the consensus module. + * Please note that `params.version` is not populated in this response, it is + * tracked separately in the x/upgrade module. + */ + params: ConsensusParams | undefined; +} + +function createBaseQueryParamsRequest(): QueryParamsRequest { + return {}; +} + +export const QueryParamsRequest = { + encode(_: QueryParamsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryParamsRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): QueryParamsRequest { + return {}; + }, + + toJSON(_: QueryParamsRequest): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): QueryParamsRequest { + const message = createBaseQueryParamsRequest(); + return message; + }, +}; + +function createBaseQueryParamsResponse(): QueryParamsResponse { + return { params: undefined }; +} + +export const QueryParamsResponse = { + encode(message: QueryParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + ConsensusParams.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.params = ConsensusParams.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryParamsResponse { + return { params: isSet(object.params) ? ConsensusParams.fromJSON(object.params) : undefined }; + }, + + toJSON(message: QueryParamsResponse): unknown { + const obj: any = {}; + message.params !== undefined && (obj.params = message.params ? ConsensusParams.toJSON(message.params) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryParamsResponse { + const message = createBaseQueryParamsResponse(); + message.params = (object.params !== undefined && object.params !== null) + ? 
ConsensusParams.fromPartial(object.params) + : undefined; + return message; + }, +}; + +/** Query defines the gRPC querier service. */ +export interface Query { + /** Params queries the parameters of x/consensus_param module. */ + Params(request: QueryParamsRequest): Promise; +} + +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.Params = this.Params.bind(this); + } + Params(request: QueryParamsRequest): Promise { + const data = QueryParamsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.consensus.v1.Query", "Params", data); + return promise.then((data) => QueryParamsResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.consensus.v1/types/cosmos/consensus/v1/tx.ts b/ts-client/cosmos.consensus.v1/types/cosmos/consensus/v1/tx.ts new file mode 100644 index 000000000..826ed65b4 --- /dev/null +++ b/ts-client/cosmos.consensus.v1/types/cosmos/consensus/v1/tx.ts @@ -0,0 +1,196 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { BlockParams, EvidenceParams, ValidatorParams } from "../../../tendermint/types/params"; + +export const protobufPackage = "cosmos.consensus.v1"; + +/** Since: cosmos-sdk 0.47 */ + +/** MsgUpdateParams is the Msg/UpdateParams request type. 
*/ +export interface MsgUpdateParams { + /** authority is the address that controls the module (defaults to x/gov unless overwritten). */ + authority: string; + /** + * params defines the x/consensus parameters to update. + * VersionsParams is not included in this Msg because it is tracked + * separarately in x/upgrade. + * + * NOTE: All parameters must be supplied. + */ + block: BlockParams | undefined; + evidence: EvidenceParams | undefined; + validator: ValidatorParams | undefined; +} + +/** + * MsgUpdateParamsResponse defines the response structure for executing a + * MsgUpdateParams message. + */ +export interface MsgUpdateParamsResponse { +} + +function createBaseMsgUpdateParams(): MsgUpdateParams { + return { authority: "", block: undefined, evidence: undefined, validator: undefined }; +} + +export const MsgUpdateParams = { + encode(message: MsgUpdateParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + if (message.block !== undefined) { + BlockParams.encode(message.block, writer.uint32(18).fork()).ldelim(); + } + if (message.evidence !== undefined) { + EvidenceParams.encode(message.evidence, writer.uint32(26).fork()).ldelim(); + } + if (message.validator !== undefined) { + ValidatorParams.encode(message.validator, writer.uint32(34).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgUpdateParams(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + case 2: + message.block = BlockParams.decode(reader, reader.uint32()); + break; + case 3: + message.evidence = EvidenceParams.decode(reader, reader.uint32()); + break; + case 4: + message.validator = ValidatorParams.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgUpdateParams { + return { + authority: isSet(object.authority) ? String(object.authority) : "", + block: isSet(object.block) ? BlockParams.fromJSON(object.block) : undefined, + evidence: isSet(object.evidence) ? EvidenceParams.fromJSON(object.evidence) : undefined, + validator: isSet(object.validator) ? ValidatorParams.fromJSON(object.validator) : undefined, + }; + }, + + toJSON(message: MsgUpdateParams): unknown { + const obj: any = {}; + message.authority !== undefined && (obj.authority = message.authority); + message.block !== undefined && (obj.block = message.block ? BlockParams.toJSON(message.block) : undefined); + message.evidence !== undefined + && (obj.evidence = message.evidence ? EvidenceParams.toJSON(message.evidence) : undefined); + message.validator !== undefined + && (obj.validator = message.validator ? ValidatorParams.toJSON(message.validator) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): MsgUpdateParams { + const message = createBaseMsgUpdateParams(); + message.authority = object.authority ?? ""; + message.block = (object.block !== undefined && object.block !== null) + ? BlockParams.fromPartial(object.block) + : undefined; + message.evidence = (object.evidence !== undefined && object.evidence !== null) + ? EvidenceParams.fromPartial(object.evidence) + : undefined; + message.validator = (object.validator !== undefined && object.validator !== null) + ? 
ValidatorParams.fromPartial(object.validator) + : undefined; + return message; + }, +}; + +function createBaseMsgUpdateParamsResponse(): MsgUpdateParamsResponse { + return {}; +} + +export const MsgUpdateParamsResponse = { + encode(_: MsgUpdateParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateParamsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgUpdateParamsResponse { + return {}; + }, + + toJSON(_: MsgUpdateParamsResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgUpdateParamsResponse { + const message = createBaseMsgUpdateParamsResponse(); + return message; + }, +}; + +/** Msg defines the bank Msg service. */ +export interface Msg { + /** + * UpdateParams defines a governance operation for updating the x/consensus_param module parameters. + * The authority is defined in the keeper. 
+ * + * Since: cosmos-sdk 0.47 + */ + UpdateParams(request: MsgUpdateParams): Promise; +} + +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.UpdateParams = this.UpdateParams.bind(this); + } + UpdateParams(request: MsgUpdateParams): Promise { + const data = MsgUpdateParams.encode(request).finish(); + const promise = this.rpc.request("cosmos.consensus.v1.Msg", "UpdateParams", data); + return promise.then((data) => MsgUpdateParamsResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.consensus.v1/types/cosmos/msg/v1/msg.ts b/ts-client/cosmos.consensus.v1/types/cosmos/msg/v1/msg.ts new file mode 100644 index 000000000..f0ff44eb2 --- /dev/null +++ b/ts-client/cosmos.consensus.v1/types/cosmos/msg/v1/msg.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "cosmos.msg.v1"; diff --git a/ts-client/cosmos.consensus.v1/types/cosmos_proto/cosmos.ts b/ts-client/cosmos.consensus.v1/types/cosmos_proto/cosmos.ts new file mode 100644 index 000000000..79c8d3486 --- /dev/null +++ b/ts-client/cosmos.consensus.v1/types/cosmos_proto/cosmos.ts @@ -0,0 +1,247 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos_proto"; + +export enum ScalarType { + SCALAR_TYPE_UNSPECIFIED = 0, + SCALAR_TYPE_STRING = 1, + SCALAR_TYPE_BYTES = 2, + UNRECOGNIZED = -1, +} + +export function scalarTypeFromJSON(object: any): ScalarType { + switch (object) { + case 0: + case "SCALAR_TYPE_UNSPECIFIED": + return ScalarType.SCALAR_TYPE_UNSPECIFIED; + case 1: + case "SCALAR_TYPE_STRING": + return ScalarType.SCALAR_TYPE_STRING; + case 2: + case "SCALAR_TYPE_BYTES": + return ScalarType.SCALAR_TYPE_BYTES; + case -1: + case "UNRECOGNIZED": + default: + return ScalarType.UNRECOGNIZED; + } +} + +export function scalarTypeToJSON(object: ScalarType): string { + switch (object) { + case ScalarType.SCALAR_TYPE_UNSPECIFIED: + return "SCALAR_TYPE_UNSPECIFIED"; + case ScalarType.SCALAR_TYPE_STRING: + return "SCALAR_TYPE_STRING"; + case ScalarType.SCALAR_TYPE_BYTES: + return "SCALAR_TYPE_BYTES"; + case ScalarType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * InterfaceDescriptor describes an interface type to be used with + * accepts_interface and implements_interface and declared by declare_interface. 
+ */ +export interface InterfaceDescriptor { + /** + * name is the name of the interface. It should be a short-name (without + * a period) such that the fully qualified name of the interface will be + * package.name, ex. for the package a.b and interface named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the interface and its + * purpose. + */ + description: string; +} + +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. + * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. + * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ +export interface ScalarDescriptor { + /** + * name is the name of the scalar. It should be a short-name (without + * a period) such that the fully qualified name of the scalar will be + * package.name, ex. for the package a.b and scalar named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the scalar and its + * encoding format. For instance a big integer or decimal scalar should + * specify precisely the expected encoding format. + */ + description: string; + /** + * field_type is the type of field with which this scalar can be used. + * Scalars can be used with one and only one type of field so that + * encoding standards and simple and clear. Currently only string and + * bytes fields are supported for scalars. 
+ */ + fieldType: ScalarType[]; +} + +function createBaseInterfaceDescriptor(): InterfaceDescriptor { + return { name: "", description: "" }; +} + +export const InterfaceDescriptor = { + encode(message: InterfaceDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseInterfaceDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): InterfaceDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + }; + }, + + toJSON(message: InterfaceDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + return obj; + }, + + fromPartial, I>>(object: I): InterfaceDescriptor { + const message = createBaseInterfaceDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? 
""; + return message; + }, +}; + +function createBaseScalarDescriptor(): ScalarDescriptor { + return { name: "", description: "", fieldType: [] }; +} + +export const ScalarDescriptor = { + encode(message: ScalarDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + writer.uint32(26).fork(); + for (const v of message.fieldType) { + writer.int32(v); + } + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ScalarDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseScalarDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.fieldType.push(reader.int32() as any); + } + } else { + message.fieldType.push(reader.int32() as any); + } + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ScalarDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + fieldType: Array.isArray(object?.fieldType) ? 
object.fieldType.map((e: any) => scalarTypeFromJSON(e)) : [], + }; + }, + + toJSON(message: ScalarDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + if (message.fieldType) { + obj.fieldType = message.fieldType.map((e) => scalarTypeToJSON(e)); + } else { + obj.fieldType = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ScalarDescriptor { + const message = createBaseScalarDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? ""; + message.fieldType = object.fieldType?.map((e) => e) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.consensus.v1/types/gogoproto/gogo.ts b/ts-client/cosmos.consensus.v1/types/gogoproto/gogo.ts new file mode 100644 index 000000000..3f41a047a --- /dev/null +++ b/ts-client/cosmos.consensus.v1/types/gogoproto/gogo.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "gogoproto"; diff --git a/ts-client/cosmos.consensus.v1/types/google/api/annotations.ts b/ts-client/cosmos.consensus.v1/types/google/api/annotations.ts new file mode 100644 index 000000000..aace4787f --- /dev/null +++ b/ts-client/cosmos.consensus.v1/types/google/api/annotations.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "google.api"; diff --git a/ts-client/cosmos.consensus.v1/types/google/api/http.ts b/ts-client/cosmos.consensus.v1/types/google/api/http.ts new file mode 100644 index 000000000..17e770d15 --- /dev/null +++ b/ts-client/cosmos.consensus.v1/types/google/api/http.ts @@ -0,0 +1,589 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.api"; + +/** + * Defines the HTTP configuration for an API service. It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface Http { + /** + * A list of HTTP configuration rules that apply to individual API methods. + * + * **NOTE:** All service configuration rules follow "last one wins" order. + */ + rules: HttpRule[]; + /** + * When set to true, URL path parmeters will be fully URI-decoded except in + * cases of single segment matches in reserved expansion, where "%2F" will be + * left encoded. + * + * The default behavior is to not decode RFC 6570 reserved characters in multi + * segment matches. 
+ */ + fullyDecodeReservedExpansion: boolean; +} + +/** + * `HttpRule` defines the mapping of an RPC method to one or more HTTP + * REST API methods. The mapping specifies how different portions of the RPC + * request message are mapped to URL path, URL query parameters, and + * HTTP request body. The mapping is typically specified as an + * `google.api.http` annotation on the RPC method, + * see "google/api/annotations.proto" for details. + * + * The mapping consists of a field specifying the path template and + * method kind. The path template can refer to fields in the request + * message, as in the example below which describes a REST GET + * operation on a resource collection of messages: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}/{sub.subfield}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * SubMessage sub = 2; // `sub.subfield` is url-mapped + * } + * message Message { + * string text = 1; // content of the resource + * } + * + * The same http annotation can alternatively be expressed inside the + * `GRPC API Configuration` YAML file. + * + * http: + * rules: + * - selector: .Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * This definition enables an automatic, bidrectional mapping of HTTP + * JSON to RPC. Example: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456/foo` | `GetMessage(message_id: "123456" sub: SubMessage(subfield: "foo"))` + * + * In general, not only fields but also field paths can be referenced + * from a path pattern. Fields mapped to the path pattern cannot be + * repeated and must have a primitive (non-message) type. + * + * Any fields in the request message which are not bound by the path + * pattern automatically become (optional) HTTP query + * parameters. 
Assume the following definition of the request message: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * int64 revision = 2; // becomes a parameter + * SubMessage sub = 3; // `sub.subfield` becomes a parameter + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456?revision=2&sub.subfield=foo` | `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: "foo"))` + * + * Note that fields which are mapped to HTTP parameters must have a + * primitive type or a repeated primitive type. Message types are not + * allowed. In the case of a repeated type, the parameter can be + * repeated in the URL, as in `...?param=A¶m=B`. + * + * For HTTP method kinds which allow a request body, the `body` field + * specifies the mapping. Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. 
This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * The following HTTP JSON to RPC mapping is enabled: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice of + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC + * mappings: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` + * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: "123456")` + * + * # Rules for HTTP mapping + * + * The rules for mapping HTTP path, query parameters, and body fields + * to the request message are as follows: + * + * 1. The `body` field specifies either `*` or a field path, or is + * omitted. If omitted, it indicates there is no HTTP request body. + * 2. 
Leaf fields (recursive expansion of nested messages in the + * request) can be classified into three types: + * (a) Matched in the URL template. + * (b) Covered by body (if body is `*`, everything except (a) fields; + * else everything under the body field) + * (c) All other fields. + * 3. URL query parameters found in the HTTP request are mapped to (c) fields. + * 4. Any body sent with an HTTP request can contain only (b) fields. + * + * The syntax of the path template is as follows: + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single path segment. The syntax `**` matches zero + * or more path segments, which must be the last part of the path except the + * `Verb`. The syntax `LITERAL` matches literal text in the path. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path, all characters + * except `[-_.~0-9a-zA-Z]` are percent-encoded. Such variables show up in the + * Discovery Document as `{var}`. + * + * If a variable contains one or more path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path, all + * characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. Such variables + * show up in the Discovery Document as `{+var}`. 
+ * + * NOTE: While the single segment variable matches the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 + * Simple String Expansion, the multi segment variable **does not** match + * RFC 6570 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. + * + * NOTE: the field paths in variables and in the `body` must not refer to + * repeated fields or map fields. + */ +export interface HttpRule { + /** + * Selects methods to which this rule applies. + * + * Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + */ + selector: string; + /** Used for listing and getting information about resources. */ + get: + | string + | undefined; + /** Used for updating a resource. */ + put: + | string + | undefined; + /** Used for creating a resource. */ + post: + | string + | undefined; + /** Used for deleting a resource. */ + delete: + | string + | undefined; + /** Used for updating a resource. */ + patch: + | string + | undefined; + /** + * The custom pattern is used for specifying an HTTP method that is not + * included in the `pattern` field, such as HEAD, or "*" to leave the + * HTTP method unspecified for this rule. The wild-card rule is useful + * for services that provide content to Web (HTML) clients. + */ + custom: + | CustomHttpPattern + | undefined; + /** + * The name of the request field whose value is mapped to the HTTP body, or + * `*` for mapping all fields not captured by the path pattern to the HTTP + * body. NOTE: the referred field must not be a repeated field and must be + * present at the top-level of request message type. + */ + body: string; + /** + * Optional. The name of the response field whose value is mapped to the HTTP + * body of response. Other response fields are ignored. When + * not set, the response message will be used as HTTP body of response. 
+ */ + responseBody: string; + /** + * Additional HTTP bindings for the selector. Nested bindings must + * not contain an `additional_bindings` field themselves (that is, + * the nesting may only be one level deep). + */ + additionalBindings: HttpRule[]; +} + +/** A custom pattern is used for defining custom HTTP verb. */ +export interface CustomHttpPattern { + /** The name of this custom HTTP verb. */ + kind: string; + /** The path matched by this custom verb. */ + path: string; +} + +function createBaseHttp(): Http { + return { rules: [], fullyDecodeReservedExpansion: false }; +} + +export const Http = { + encode(message: Http, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rules) { + HttpRule.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.fullyDecodeReservedExpansion === true) { + writer.uint32(16).bool(message.fullyDecodeReservedExpansion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Http { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rules.push(HttpRule.decode(reader, reader.uint32())); + break; + case 2: + message.fullyDecodeReservedExpansion = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Http { + return { + rules: Array.isArray(object?.rules) ? object.rules.map((e: any) => HttpRule.fromJSON(e)) : [], + fullyDecodeReservedExpansion: isSet(object.fullyDecodeReservedExpansion) + ? Boolean(object.fullyDecodeReservedExpansion) + : false, + }; + }, + + toJSON(message: Http): unknown { + const obj: any = {}; + if (message.rules) { + obj.rules = message.rules.map((e) => e ? 
HttpRule.toJSON(e) : undefined); + } else { + obj.rules = []; + } + message.fullyDecodeReservedExpansion !== undefined + && (obj.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion); + return obj; + }, + + fromPartial, I>>(object: I): Http { + const message = createBaseHttp(); + message.rules = object.rules?.map((e) => HttpRule.fromPartial(e)) || []; + message.fullyDecodeReservedExpansion = object.fullyDecodeReservedExpansion ?? false; + return message; + }, +}; + +function createBaseHttpRule(): HttpRule { + return { + selector: "", + get: undefined, + put: undefined, + post: undefined, + delete: undefined, + patch: undefined, + custom: undefined, + body: "", + responseBody: "", + additionalBindings: [], + }; +} + +export const HttpRule = { + encode(message: HttpRule, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.selector !== "") { + writer.uint32(10).string(message.selector); + } + if (message.get !== undefined) { + writer.uint32(18).string(message.get); + } + if (message.put !== undefined) { + writer.uint32(26).string(message.put); + } + if (message.post !== undefined) { + writer.uint32(34).string(message.post); + } + if (message.delete !== undefined) { + writer.uint32(42).string(message.delete); + } + if (message.patch !== undefined) { + writer.uint32(50).string(message.patch); + } + if (message.custom !== undefined) { + CustomHttpPattern.encode(message.custom, writer.uint32(66).fork()).ldelim(); + } + if (message.body !== "") { + writer.uint32(58).string(message.body); + } + if (message.responseBody !== "") { + writer.uint32(98).string(message.responseBody); + } + for (const v of message.additionalBindings) { + HttpRule.encode(v!, writer.uint32(90).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): HttpRule { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseHttpRule(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.selector = reader.string(); + break; + case 2: + message.get = reader.string(); + break; + case 3: + message.put = reader.string(); + break; + case 4: + message.post = reader.string(); + break; + case 5: + message.delete = reader.string(); + break; + case 6: + message.patch = reader.string(); + break; + case 8: + message.custom = CustomHttpPattern.decode(reader, reader.uint32()); + break; + case 7: + message.body = reader.string(); + break; + case 12: + message.responseBody = reader.string(); + break; + case 11: + message.additionalBindings.push(HttpRule.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): HttpRule { + return { + selector: isSet(object.selector) ? String(object.selector) : "", + get: isSet(object.get) ? String(object.get) : undefined, + put: isSet(object.put) ? String(object.put) : undefined, + post: isSet(object.post) ? String(object.post) : undefined, + delete: isSet(object.delete) ? String(object.delete) : undefined, + patch: isSet(object.patch) ? String(object.patch) : undefined, + custom: isSet(object.custom) ? CustomHttpPattern.fromJSON(object.custom) : undefined, + body: isSet(object.body) ? String(object.body) : "", + responseBody: isSet(object.responseBody) ? String(object.responseBody) : "", + additionalBindings: Array.isArray(object?.additionalBindings) + ? 
object.additionalBindings.map((e: any) => HttpRule.fromJSON(e)) + : [], + }; + }, + + toJSON(message: HttpRule): unknown { + const obj: any = {}; + message.selector !== undefined && (obj.selector = message.selector); + message.get !== undefined && (obj.get = message.get); + message.put !== undefined && (obj.put = message.put); + message.post !== undefined && (obj.post = message.post); + message.delete !== undefined && (obj.delete = message.delete); + message.patch !== undefined && (obj.patch = message.patch); + message.custom !== undefined + && (obj.custom = message.custom ? CustomHttpPattern.toJSON(message.custom) : undefined); + message.body !== undefined && (obj.body = message.body); + message.responseBody !== undefined && (obj.responseBody = message.responseBody); + if (message.additionalBindings) { + obj.additionalBindings = message.additionalBindings.map((e) => e ? HttpRule.toJSON(e) : undefined); + } else { + obj.additionalBindings = []; + } + return obj; + }, + + fromPartial, I>>(object: I): HttpRule { + const message = createBaseHttpRule(); + message.selector = object.selector ?? ""; + message.get = object.get ?? undefined; + message.put = object.put ?? undefined; + message.post = object.post ?? undefined; + message.delete = object.delete ?? undefined; + message.patch = object.patch ?? undefined; + message.custom = (object.custom !== undefined && object.custom !== null) + ? CustomHttpPattern.fromPartial(object.custom) + : undefined; + message.body = object.body ?? ""; + message.responseBody = object.responseBody ?? 
""; + message.additionalBindings = object.additionalBindings?.map((e) => HttpRule.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseCustomHttpPattern(): CustomHttpPattern { + return { kind: "", path: "" }; +} + +export const CustomHttpPattern = { + encode(message: CustomHttpPattern, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.kind !== "") { + writer.uint32(10).string(message.kind); + } + if (message.path !== "") { + writer.uint32(18).string(message.path); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CustomHttpPattern { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCustomHttpPattern(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.kind = reader.string(); + break; + case 2: + message.path = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): CustomHttpPattern { + return { kind: isSet(object.kind) ? String(object.kind) : "", path: isSet(object.path) ? String(object.path) : "" }; + }, + + toJSON(message: CustomHttpPattern): unknown { + const obj: any = {}; + message.kind !== undefined && (obj.kind = message.kind); + message.path !== undefined && (obj.path = message.path); + return obj; + }, + + fromPartial, I>>(object: I): CustomHttpPattern { + const message = createBaseCustomHttpPattern(); + message.kind = object.kind ?? ""; + message.path = object.path ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? 
keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.consensus.v1/types/google/protobuf/descriptor.ts b/ts-client/cosmos.consensus.v1/types/google/protobuf/descriptor.ts new file mode 100644 index 000000000..8fb7bb24c --- /dev/null +++ b/ts-client/cosmos.consensus.v1/types/google/protobuf/descriptor.ts @@ -0,0 +1,3753 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSet { + file: FileDescriptorProto[]; +} + +/** Describes a complete .proto file. */ +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name: string; + /** e.g. "foo", "foo.bar", etc. */ + package: string; + /** Names of files imported by this file. */ + dependency: string[]; + /** Indexes of the public imported files in the dependency list above. */ + publicDependency: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weakDependency: number[]; + /** All top-level definitions in this file. */ + messageType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + service: ServiceDescriptorProto[]; + extension: FieldDescriptorProto[]; + options: + | FileOptions + | undefined; + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. + */ + sourceCodeInfo: + | SourceCodeInfo + | undefined; + /** + * The syntax of the proto file. 
+ * The supported values are "proto2" and "proto3". + */ + syntax: string; +} + +/** Describes a message type. */ +export interface DescriptorProto { + name: string; + field: FieldDescriptorProto[]; + extension: FieldDescriptorProto[]; + nestedType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + extensionRange: DescriptorProto_ExtensionRange[]; + oneofDecl: OneofDescriptorProto[]; + options: MessageOptions | undefined; + reservedRange: DescriptorProto_ReservedRange[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reservedName: string[]; +} + +export interface DescriptorProto_ExtensionRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; + options: ExtensionRangeOptions | undefined; +} + +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; +} + +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Describes a field within a message. */ +export interface FieldDescriptorProto { + name: string; + number: number; + label: FieldDescriptorProto_Label; + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type: FieldDescriptorProto_Type; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). 
+ */ + typeName: string; + /** + * For extensions, this is the name of the type being extended. It is + * resolved in the same manner as type_name. + */ + extendee: string; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + * TODO(kenton): Base-64 encode? + */ + defaultValue: string; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneofIndex: number; + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + jsonName: string; + options: + | FieldOptions + | undefined; + /** + * If true, this is a proto3 "optional". When a proto3 field is optional, it + * tracks presence regardless of field type. + * + * When proto3_optional is true, this field must be belong to a oneof to + * signal to old proto3 clients that presence is tracked for this field. This + * oneof is known as a "synthetic" oneof, and this field must be its sole + * member (each proto3 optional field gets its own synthetic oneof). Synthetic + * oneofs exist in the descriptor only, and do not generate any API. Synthetic + * oneofs must be ordered after all "real" oneofs. + * + * For message fields, proto3_optional doesn't create any semantic change, + * since non-repeated message fields always track presence. However it still + * indicates the semantic detail of whether the user wrote "optional" or not. + * This can be useful for round-tripping the .proto file. For consistency we + * give message fields a synthetic oneof also, even though it is not required + * to track presence. 
This is especially important because the parser can't + * tell if a field is a message or an enum, so it must always create a + * synthetic oneof. + * + * Proto2 optional fields do not set this flag, because they already indicate + * optional with `LABEL_OPTIONAL`. + */ + proto3Optional: boolean; +} + +export enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + TYPE_GROUP = 10, + /** TYPE_MESSAGE - Length-delimited aggregate. */ + TYPE_MESSAGE = 11, + /** TYPE_BYTES - New in version 2. */ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + /** TYPE_SINT64 - Uses ZigZag encoding. 
*/ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + return FieldDescriptorProto_Type.TYPE_SINT64; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Type.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case 
FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + case FieldDescriptorProto_Type.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Label.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string { + switch (object) { + case 
FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + case FieldDescriptorProto_Label.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** Describes a oneof. */ +export interface OneofDescriptorProto { + name: string; + options: OneofOptions | undefined; +} + +/** Describes an enum type. */ +export interface EnumDescriptorProto { + name: string; + value: EnumValueDescriptorProto[]; + options: + | EnumOptions + | undefined; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reservedRange: EnumDescriptorProto_EnumReservedRange[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reservedName: string[]; +} + +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start: number; + /** Inclusive. */ + end: number; +} + +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProto { + name: string; + number: number; + options: EnumValueOptions | undefined; +} + +/** Describes a service. */ +export interface ServiceDescriptorProto { + name: string; + method: MethodDescriptorProto[]; + options: ServiceOptions | undefined; +} + +/** Describes a method of a service. */ +export interface MethodDescriptorProto { + name: string; + /** + * Input and output type names. 
These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + inputType: string; + outputType: string; + options: + | MethodOptions + | undefined; + /** Identifies if client streams multiple client messages */ + clientStreaming: boolean; + /** Identifies if server streams multiple server messages */ + serverStreaming: boolean; +} + +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage: string; + /** + * Controls the name of the wrapper Java class generated for the .proto file. + * That class will always contain the .proto file's getDescriptor() method as + * well as any top-level extensions defined in the .proto file. + * If java_multiple_files is disabled, then all the other classes from the + * .proto file will be nested inside the single wrapper outer class. + */ + javaOuterClassname: string; + /** + * If enabled, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the wrapper class + * named by java_outer_classname. However, the wrapper class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + javaMultipleFiles: boolean; + /** + * This option does nothing. + * + * @deprecated + */ + javaGenerateEqualsAndHash: boolean; + /** + * If set true, then the Java2 code generator will generate code that + * throws an exception whenever an attempt is made to assign a non-UTF-8 + * byte sequence to a string field. + * Message reflection will do the same. + * However, an extension field still accepts non-UTF-8 byte sequences. 
+ * This option has no effect on when used with the lite runtime. + */ + javaStringCheckUtf8: boolean; + optimizeFor: FileOptions_OptimizeMode; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + goPackage: string; + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + ccGenericServices: boolean; + javaGenericServices: boolean; + pyGenericServices: boolean; + phpGenericServices: boolean; + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated: boolean; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + ccEnableArenas: boolean; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. + */ + objcClassPrefix: string; + /** Namespace for generated classes; defaults to the package. 
*/ + csharpNamespace: string; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swiftPrefix: string; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + phpClassPrefix: string; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. When this option is empty, the package name will be used for + * determining the namespace. + */ + phpNamespace: string; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + phpMetadataNamespace: string; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + rubyPackage: string; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Generated classes can be optimized for speed or code size. */ +export enum FileOptions_OptimizeMode { + /** SPEED - Generate complete code for parsing, serialization, */ + SPEED = 1, + /** CODE_SIZE - etc. */ + CODE_SIZE = 2, + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. 
*/ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1, +} + +export function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + case -1: + case "UNRECOGNIZED": + default: + return FileOptions_OptimizeMode.UNRECOGNIZED; + } +} + +export function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + case FileOptions_OptimizeMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + messageSetWireFormat: boolean; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". 
+ */ + noStandardDescriptorAccessor: boolean; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated: boolean; + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + mapEntry: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface FieldOptions { + /** + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is not yet implemented in the open source + * release -- sorry, we'll try to include it in a future version! + */ + ctype: FieldOptions_CType; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. 
+ */ + packed: boolean; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. + * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype: FieldOptions_JSType; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * Note that implementations may choose not to check required fields within + * a lazy sub-message. That is, calling IsInitialized() on the outer message + * may return true even if the inner message has missing required fields. 
+ * This is necessary because otherwise the inner message would have to be + * parsed in order to perform the check, defeating the purpose of lazy + * parsing. An implementation which chooses not to check required fields + * must be consistent about it. That is, for any particular sub-message, the + * implementation must either *always* check its required fields, or *never* + * check its required fields, regardless of whether or not the message has + * been parsed. + */ + lazy: boolean; + /** + * Is this field deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated: boolean; + /** For Google-internal migration only. Do not use. */ + weak: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export enum FieldOptions_CType { + /** STRING - Default mode. */ + STRING = 0, + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_CType.UNRECOGNIZED; + } +} + +export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + case FieldOptions_CType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. */ + JS_NORMAL = 0, + /** JS_STRING - Use JavaScript strings. 
*/ + JS_STRING = 1, + /** JS_NUMBER - Use JavaScript numbers. */ + JS_NUMBER = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_JSType.UNRECOGNIZED; + } +} + +export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + case FieldOptions_JSType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface OneofOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias: boolean; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumValueOptions { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. 
*/ + uninterpretedOption: UninterpretedOption[]; +} + +export interface ServiceOptions { + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated: boolean; + idempotencyLevel: MethodOptions_IdempotencyLevel; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
+ */ +export enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1, +} + +export function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + case -1: + case "UNRECOGNIZED": + default: + return MethodOptions_IdempotencyLevel.UNRECOGNIZED; + } +} + +export function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + case MethodOptions_IdempotencyLevel.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. 
+ */ + identifierValue: string; + positiveIntValue: number; + negativeIntValue: number; + doubleValue: number; + stringValue: Uint8Array; + aggregateValue: string; +} + +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePart { + namePart: string; + isExtension: boolean; +} + +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. 
This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[]; +} + +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition. For + * example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). 
+ */ + path: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. + * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to qux. + * // + * // Another line attached to qux. + * optional double qux = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to qux or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. * / + * /* Block comment attached to + * * grault. 
* / + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leadingComments: string; + trailingComments: string; + leadingDetachedComments: string[]; +} + +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. + */ + annotation: GeneratedCodeInfo_Annotation[]; +} + +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[]; + /** Identifies the filesystem path to the original source .proto. */ + sourceFile: string; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin: number; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified offset. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end: number; +} + +function createBaseFileDescriptorSet(): FileDescriptorSet { + return { file: [] }; +} + +export const FileDescriptorSet = { + encode(message: FileDescriptorSet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.file) { + FileDescriptorProto.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorSet { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.file.push(FileDescriptorProto.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorSet { + return { file: Array.isArray(object?.file) ? object.file.map((e: any) => FileDescriptorProto.fromJSON(e)) : [] }; + }, + + toJSON(message: FileDescriptorSet): unknown { + const obj: any = {}; + if (message.file) { + obj.file = message.file.map((e) => e ? FileDescriptorProto.toJSON(e) : undefined); + } else { + obj.file = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorSet { + const message = createBaseFileDescriptorSet(); + message.file = object.file?.map((e) => FileDescriptorProto.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFileDescriptorProto(): FileDescriptorProto { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + }; +} + +export const FileDescriptorProto = { + encode(message: FileDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.package !== "") { + writer.uint32(18).string(message.package); + } + for (const v of message.dependency) { + writer.uint32(26).string(v!); + } + writer.uint32(82).fork(); + for (const v of message.publicDependency) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(90).fork(); + for (const v of message.weakDependency) { + writer.int32(v); + } + writer.ldelim(); + for (const v of message.messageType) { + DescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of 
message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const v of message.service) { + ServiceDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(58).fork()).ldelim(); + } + if (message.options !== undefined) { + FileOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.sourceCodeInfo !== undefined) { + SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(74).fork()).ldelim(); + } + if (message.syntax !== "") { + writer.uint32(98).string(message.syntax); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFileDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.package = reader.string(); + break; + case 3: + message.dependency.push(reader.string()); + break; + case 10: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.publicDependency.push(reader.int32()); + } + } else { + message.publicDependency.push(reader.int32()); + } + break; + case 11: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.weakDependency.push(reader.int32()); + } + } else { + message.weakDependency.push(reader.int32()); + } + break; + case 4: + message.messageType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.service.push(ServiceDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + 
message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 8: + message.options = FileOptions.decode(reader, reader.uint32()); + break; + case 9: + message.sourceCodeInfo = SourceCodeInfo.decode(reader, reader.uint32()); + break; + case 12: + message.syntax = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + package: isSet(object.package) ? String(object.package) : "", + dependency: Array.isArray(object?.dependency) ? object.dependency.map((e: any) => String(e)) : [], + publicDependency: Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e: any) => Number(e)) + : [], + weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e: any) => Number(e)) : [], + messageType: Array.isArray(object?.messageType) + ? object.messageType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + service: Array.isArray(object?.service) ? object.service.map((e: any) => ServiceDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) ? SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, + syntax: isSet(object.syntax) ? 
String(object.syntax) : "", + }; + }, + + toJSON(message: FileDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.package !== undefined && (obj.package = message.package); + if (message.dependency) { + obj.dependency = message.dependency.map((e) => e); + } else { + obj.dependency = []; + } + if (message.publicDependency) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } else { + obj.publicDependency = []; + } + if (message.weakDependency) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } else { + obj.weakDependency = []; + } + if (message.messageType) { + obj.messageType = message.messageType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.messageType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.service) { + obj.service = message.service.map((e) => e ? ServiceDescriptorProto.toJSON(e) : undefined); + } else { + obj.service = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + message.options !== undefined && (obj.options = message.options ? FileOptions.toJSON(message.options) : undefined); + message.sourceCodeInfo !== undefined + && (obj.sourceCodeInfo = message.sourceCodeInfo ? SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined); + message.syntax !== undefined && (obj.syntax = message.syntax); + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorProto { + const message = createBaseFileDescriptorProto(); + message.name = object.name ?? ""; + message.package = object.package ?? 
""; + message.dependency = object.dependency?.map((e) => e) || []; + message.publicDependency = object.publicDependency?.map((e) => e) || []; + message.weakDependency = object.weakDependency?.map((e) => e) || []; + message.messageType = object.messageType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.service = object.service?.map((e) => ServiceDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? FileOptions.fromPartial(object.options) + : undefined; + message.sourceCodeInfo = (object.sourceCodeInfo !== undefined && object.sourceCodeInfo !== null) + ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) + : undefined; + message.syntax = object.syntax ?? ""; + return message; + }, +}; + +function createBaseDescriptorProto(): DescriptorProto { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + }; +} + +export const DescriptorProto = { + encode(message: DescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.field) { + FieldDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.nestedType) { + DescriptorProto.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.extensionRange) { + DescriptorProto_ExtensionRange.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const 
v of message.oneofDecl) { + OneofDescriptorProto.encode(v!, writer.uint32(66).fork()).ldelim(); + } + if (message.options !== undefined) { + MessageOptions.encode(message.options, writer.uint32(58).fork()).ldelim(); + } + for (const v of message.reservedRange) { + DescriptorProto_ReservedRange.encode(v!, writer.uint32(74).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(82).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.field.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.nestedType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 4: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.extensionRange.push(DescriptorProto_ExtensionRange.decode(reader, reader.uint32())); + break; + case 8: + message.oneofDecl.push(OneofDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + message.options = MessageOptions.decode(reader, reader.uint32()); + break; + case 9: + message.reservedRange.push(DescriptorProto_ReservedRange.decode(reader, reader.uint32())); + break; + case 10: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + field: Array.isArray(object?.field) ? 
object.field.map((e: any) => FieldDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: Array.isArray(object?.nestedType) + ? object.nestedType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + extensionRange: Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e: any) => DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e: any) => OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? MessageOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: DescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.field) { + obj.field = message.field.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.field = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + if (message.nestedType) { + obj.nestedType = message.nestedType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.nestedType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.extensionRange) { + obj.extensionRange = message.extensionRange.map((e) => e ? 
DescriptorProto_ExtensionRange.toJSON(e) : undefined); + } else { + obj.extensionRange = []; + } + if (message.oneofDecl) { + obj.oneofDecl = message.oneofDecl.map((e) => e ? OneofDescriptorProto.toJSON(e) : undefined); + } else { + obj.oneofDecl = []; + } + message.options !== undefined + && (obj.options = message.options ? MessageOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => e ? DescriptorProto_ReservedRange.toJSON(e) : undefined); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): DescriptorProto { + const message = createBaseDescriptorProto(); + message.name = object.name ?? ""; + message.field = object.field?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.nestedType = object.nestedType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.extensionRange = object.extensionRange?.map((e) => DescriptorProto_ExtensionRange.fromPartial(e)) || []; + message.oneofDecl = object.oneofDecl?.map((e) => OneofDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
MessageOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => DescriptorProto_ReservedRange.fromPartial(e)) || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseDescriptorProto_ExtensionRange(): DescriptorProto_ExtensionRange { + return { start: 0, end: 0, options: undefined }; +} + +export const DescriptorProto_ExtensionRange = { + encode(message: DescriptorProto_ExtensionRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + if (message.options !== undefined) { + ExtensionRangeOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ExtensionRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ExtensionRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + case 3: + message.options = ExtensionRangeOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ExtensionRange { + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + options: isSet(object.options) ? 
ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: DescriptorProto_ExtensionRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + message.options !== undefined + && (obj.options = message.options ? ExtensionRangeOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ExtensionRange { + const message = createBaseDescriptorProto_ExtensionRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? ExtensionRangeOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseDescriptorProto_ReservedRange(): DescriptorProto_ReservedRange { + return { start: 0, end: 0 }; +} + +export const DescriptorProto_ReservedRange = { + encode(message: DescriptorProto_ReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? 
Number(object.end) : 0 }; + }, + + toJSON(message: DescriptorProto_ReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ReservedRange { + const message = createBaseDescriptorProto_ReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + return message; + }, +}; + +function createBaseExtensionRangeOptions(): ExtensionRangeOptions { + return { uninterpretedOption: [] }; +} + +export const ExtensionRangeOptions = { + encode(message: ExtensionRangeOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ExtensionRangeOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ExtensionRangeOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} + +export const FieldDescriptorProto = { + encode(message: FieldDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(24).int32(message.number); + } + if (message.label !== 1) { + writer.uint32(32).int32(message.label); + } + if (message.type !== 1) { + writer.uint32(40).int32(message.type); + } + if (message.typeName !== "") { + writer.uint32(50).string(message.typeName); + } + if (message.extendee !== "") { + writer.uint32(18).string(message.extendee); + } + if (message.defaultValue !== "") { + writer.uint32(58).string(message.defaultValue); + } + if (message.oneofIndex !== 0) { + writer.uint32(72).int32(message.oneofIndex); + } + if (message.jsonName !== "") { + writer.uint32(82).string(message.jsonName); + } + if (message.options !== undefined) { + FieldOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.proto3Optional === true) { + writer.uint32(136).bool(message.proto3Optional); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 3: + message.number = reader.int32(); + break; + case 4: + message.label = reader.int32() as any; + break; + case 5: + message.type = reader.int32() as any; + break; + case 6: + message.typeName = reader.string(); + break; + case 2: + message.extendee = reader.string(); + break; + case 7: + message.defaultValue = reader.string(); + break; + case 9: + message.oneofIndex = reader.int32(); + break; + case 10: + message.jsonName = reader.string(); + break; + case 8: + message.options = FieldOptions.decode(reader, reader.uint32()); + break; + case 17: + message.proto3Optional = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) ? String(object.typeName) : "", + extendee: isSet(object.extendee) ? String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "", + oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? String(object.jsonName) : "", + options: isSet(object.options) ? FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) ? 
Boolean(object.proto3Optional) : false, + }; + }, + + toJSON(message: FieldDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label)); + message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type)); + message.typeName !== undefined && (obj.typeName = message.typeName); + message.extendee !== undefined && (obj.extendee = message.extendee); + message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue); + message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex)); + message.jsonName !== undefined && (obj.jsonName = message.jsonName); + message.options !== undefined && (obj.options = message.options ? FieldOptions.toJSON(message.options) : undefined); + message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional); + return obj; + }, + + fromPartial, I>>(object: I): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.label = object.label ?? 1; + message.type = object.type ?? 1; + message.typeName = object.typeName ?? ""; + message.extendee = object.extendee ?? ""; + message.defaultValue = object.defaultValue ?? ""; + message.oneofIndex = object.oneofIndex ?? 0; + message.jsonName = object.jsonName ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? FieldOptions.fromPartial(object.options) + : undefined; + message.proto3Optional = object.proto3Optional ?? 
false; + return message; + }, +}; + +function createBaseOneofDescriptorProto(): OneofDescriptorProto { + return { name: "", options: undefined }; +} + +export const OneofDescriptorProto = { + encode(message: OneofDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.options !== undefined) { + OneofOptions.encode(message.options, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.options = OneofOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + options: isSet(object.options) ? OneofOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: OneofDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.options !== undefined && (obj.options = message.options ? OneofOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto(); + message.name = object.name ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? 
OneofOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseEnumDescriptorProto(): EnumDescriptorProto { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] }; +} + +export const EnumDescriptorProto = { + encode(message: EnumDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.value) { + EnumValueDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + EnumOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.reservedRange) { + EnumDescriptorProto_EnumReservedRange.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(42).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.value.push(EnumValueDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = EnumOptions.decode(reader, reader.uint32()); + break; + case 4: + message.reservedRange.push(EnumDescriptorProto_EnumReservedRange.decode(reader, reader.uint32())); + break; + case 5: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + value: Array.isArray(object?.value) ? 
object.value.map((e: any) => EnumValueDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? EnumOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: EnumDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.value) { + obj.value = message.value.map((e) => e ? EnumValueDescriptorProto.toJSON(e) : undefined); + } else { + obj.value = []; + } + message.options !== undefined && (obj.options = message.options ? EnumOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => + e ? EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined + ); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto(); + message.name = object.name ?? ""; + message.value = object.value?.map((e) => EnumValueDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
EnumOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => EnumDescriptorProto_EnumReservedRange.fromPartial(e)) + || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseEnumDescriptorProto_EnumReservedRange(): EnumDescriptorProto_EnumReservedRange { + return { start: 0, end: 0 }; +} + +export const EnumDescriptorProto_EnumReservedRange = { + encode(message: EnumDescriptorProto_EnumReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto_EnumReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto_EnumReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + + toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +function createBaseEnumValueDescriptorProto(): EnumValueDescriptorProto { + return { name: "", number: 0, options: undefined }; +} + +export const EnumValueDescriptorProto = { + encode(message: EnumValueDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(16).int32(message.number); + } + if (message.options !== undefined) { + EnumValueOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.number = reader.int32(); + break; + case 3: + message.options = EnumValueOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + options: isSet(object.options) ? EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: EnumValueDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.options !== undefined + && (obj.options = message.options ? 
EnumValueOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? EnumValueOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseServiceDescriptorProto(): ServiceDescriptorProto { + return { name: "", method: [], options: undefined }; +} + +export const ServiceDescriptorProto = { + encode(message: ServiceDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.method) { + MethodDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + ServiceOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.method.push(MethodDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = ServiceOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + method: Array.isArray(object?.method) ? object.method.map((e: any) => MethodDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? 
ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: ServiceDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.method) { + obj.method = message.method.map((e) => e ? MethodDescriptorProto.toJSON(e) : undefined); + } else { + obj.method = []; + } + message.options !== undefined + && (obj.options = message.options ? ServiceOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto(); + message.name = object.name ?? ""; + message.method = object.method?.map((e) => MethodDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? ServiceOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseMethodDescriptorProto(): MethodDescriptorProto { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} + +export const MethodDescriptorProto = { + encode(message: MethodDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.inputType !== "") { + writer.uint32(18).string(message.inputType); + } + if (message.outputType !== "") { + writer.uint32(26).string(message.outputType); + } + if (message.options !== undefined) { + MethodOptions.encode(message.options, writer.uint32(34).fork()).ldelim(); + } + if (message.clientStreaming === true) { + writer.uint32(40).bool(message.clientStreaming); + } + if (message.serverStreaming === true) { + writer.uint32(48).bool(message.serverStreaming); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodDescriptorProto { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.inputType = reader.string(); + break; + case 3: + message.outputType = reader.string(); + break; + case 4: + message.options = MethodOptions.decode(reader, reader.uint32()); + break; + case 5: + message.clientStreaming = reader.bool(); + break; + case 6: + message.serverStreaming = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + inputType: isSet(object.inputType) ? String(object.inputType) : "", + outputType: isSet(object.outputType) ? String(object.outputType) : "", + options: isSet(object.options) ? MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false, + serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false, + }; + }, + + toJSON(message: MethodDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.inputType !== undefined && (obj.inputType = message.inputType); + message.outputType !== undefined && (obj.outputType = message.outputType); + message.options !== undefined + && (obj.options = message.options ? MethodOptions.toJSON(message.options) : undefined); + message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming); + message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming); + return obj; + }, + + fromPartial, I>>(object: I): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto(); + message.name = object.name ?? 
""; + message.inputType = object.inputType ?? ""; + message.outputType = object.outputType ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? MethodOptions.fromPartial(object.options) + : undefined; + message.clientStreaming = object.clientStreaming ?? false; + message.serverStreaming = object.serverStreaming ?? false; + return message; + }, +}; + +function createBaseFileOptions(): FileOptions { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + uninterpretedOption: [], + }; +} + +export const FileOptions = { + encode(message: FileOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.javaPackage !== "") { + writer.uint32(10).string(message.javaPackage); + } + if (message.javaOuterClassname !== "") { + writer.uint32(66).string(message.javaOuterClassname); + } + if (message.javaMultipleFiles === true) { + writer.uint32(80).bool(message.javaMultipleFiles); + } + if (message.javaGenerateEqualsAndHash === true) { + writer.uint32(160).bool(message.javaGenerateEqualsAndHash); + } + if (message.javaStringCheckUtf8 === true) { + writer.uint32(216).bool(message.javaStringCheckUtf8); + } + if (message.optimizeFor !== 1) { + writer.uint32(72).int32(message.optimizeFor); + } + if (message.goPackage !== "") { + writer.uint32(90).string(message.goPackage); + } + if (message.ccGenericServices === true) { + writer.uint32(128).bool(message.ccGenericServices); + } + if (message.javaGenericServices === true) { + writer.uint32(136).bool(message.javaGenericServices); + } + if 
(message.pyGenericServices === true) { + writer.uint32(144).bool(message.pyGenericServices); + } + if (message.phpGenericServices === true) { + writer.uint32(336).bool(message.phpGenericServices); + } + if (message.deprecated === true) { + writer.uint32(184).bool(message.deprecated); + } + if (message.ccEnableArenas === true) { + writer.uint32(248).bool(message.ccEnableArenas); + } + if (message.objcClassPrefix !== "") { + writer.uint32(290).string(message.objcClassPrefix); + } + if (message.csharpNamespace !== "") { + writer.uint32(298).string(message.csharpNamespace); + } + if (message.swiftPrefix !== "") { + writer.uint32(314).string(message.swiftPrefix); + } + if (message.phpClassPrefix !== "") { + writer.uint32(322).string(message.phpClassPrefix); + } + if (message.phpNamespace !== "") { + writer.uint32(330).string(message.phpNamespace); + } + if (message.phpMetadataNamespace !== "") { + writer.uint32(354).string(message.phpMetadataNamespace); + } + if (message.rubyPackage !== "") { + writer.uint32(362).string(message.rubyPackage); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.javaPackage = reader.string(); + break; + case 8: + message.javaOuterClassname = reader.string(); + break; + case 10: + message.javaMultipleFiles = reader.bool(); + break; + case 20: + message.javaGenerateEqualsAndHash = reader.bool(); + break; + case 27: + message.javaStringCheckUtf8 = reader.bool(); + break; + case 9: + message.optimizeFor = reader.int32() as any; + break; + case 11: + message.goPackage = reader.string(); + break; + case 16: + message.ccGenericServices = reader.bool(); + break; + case 17: + message.javaGenericServices = reader.bool(); + break; + case 18: + message.pyGenericServices = reader.bool(); + break; + case 42: + message.phpGenericServices = reader.bool(); + break; + case 23: + message.deprecated = reader.bool(); + break; + case 31: + message.ccEnableArenas = reader.bool(); + break; + case 36: + message.objcClassPrefix = reader.string(); + break; + case 37: + message.csharpNamespace = reader.string(); + break; + case 39: + message.swiftPrefix = reader.string(); + break; + case 40: + message.phpClassPrefix = reader.string(); + break; + case 41: + message.phpNamespace = reader.string(); + break; + case 44: + message.phpMetadataNamespace = reader.string(); + break; + case 45: + message.rubyPackage = reader.string(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileOptions { + return { + javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "", + javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) ? 
Boolean(object.javaMultipleFiles) : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false, + optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, + goPackage: isSet(object.goPackage) ? String(object.goPackage) : "", + ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false, + javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false, + pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false, + phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false, + objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "", + csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "", + swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "", + phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "", + phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "", + rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "", + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FileOptions): unknown { + const obj: any = {}; + message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage); + message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname); + message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles); + message.javaGenerateEqualsAndHash !== undefined + && (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash); + message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8); + message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor)); + message.goPackage !== undefined && (obj.goPackage = message.goPackage); + message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices); + message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices); + message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices); + message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas); + message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix); + message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace); + message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix); + message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix); + message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace); + message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace); + message.rubyPackage !== undefined 
&& (obj.rubyPackage = message.rubyPackage); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileOptions { + const message = createBaseFileOptions(); + message.javaPackage = object.javaPackage ?? ""; + message.javaOuterClassname = object.javaOuterClassname ?? ""; + message.javaMultipleFiles = object.javaMultipleFiles ?? false; + message.javaGenerateEqualsAndHash = object.javaGenerateEqualsAndHash ?? false; + message.javaStringCheckUtf8 = object.javaStringCheckUtf8 ?? false; + message.optimizeFor = object.optimizeFor ?? 1; + message.goPackage = object.goPackage ?? ""; + message.ccGenericServices = object.ccGenericServices ?? false; + message.javaGenericServices = object.javaGenericServices ?? false; + message.pyGenericServices = object.pyGenericServices ?? false; + message.phpGenericServices = object.phpGenericServices ?? false; + message.deprecated = object.deprecated ?? false; + message.ccEnableArenas = object.ccEnableArenas ?? false; + message.objcClassPrefix = object.objcClassPrefix ?? ""; + message.csharpNamespace = object.csharpNamespace ?? ""; + message.swiftPrefix = object.swiftPrefix ?? ""; + message.phpClassPrefix = object.phpClassPrefix ?? ""; + message.phpNamespace = object.phpNamespace ?? ""; + message.phpMetadataNamespace = object.phpMetadataNamespace ?? ""; + message.rubyPackage = object.rubyPackage ?? 
""; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMessageOptions(): MessageOptions { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + uninterpretedOption: [], + }; +} + +export const MessageOptions = { + encode(message: MessageOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.messageSetWireFormat === true) { + writer.uint32(8).bool(message.messageSetWireFormat); + } + if (message.noStandardDescriptorAccessor === true) { + writer.uint32(16).bool(message.noStandardDescriptorAccessor); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.mapEntry === true) { + writer.uint32(56).bool(message.mapEntry); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MessageOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMessageOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.messageSetWireFormat = reader.bool(); + break; + case 2: + message.noStandardDescriptorAccessor = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 7: + message.mapEntry = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MessageOptions { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) ? 
Boolean(object.messageSetWireFormat) : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MessageOptions): unknown { + const obj: any = {}; + message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat); + message.noStandardDescriptorAccessor !== undefined + && (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MessageOptions { + const message = createBaseMessageOptions(); + message.messageSetWireFormat = object.messageSetWireFormat ?? false; + message.noStandardDescriptorAccessor = object.noStandardDescriptorAccessor ?? false; + message.deprecated = object.deprecated ?? false; + message.mapEntry = object.mapEntry ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldOptions(): FieldOptions { + return { ctype: 0, packed: false, jstype: 0, lazy: false, deprecated: false, weak: false, uninterpretedOption: [] }; +} + +export const FieldOptions = { + encode(message: FieldOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ctype !== 0) { + writer.uint32(8).int32(message.ctype); + } + if (message.packed === true) { + writer.uint32(16).bool(message.packed); + } + if (message.jstype !== 0) { + writer.uint32(48).int32(message.jstype); + } + if (message.lazy === true) { + writer.uint32(40).bool(message.lazy); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.weak === true) { + writer.uint32(80).bool(message.weak); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.ctype = reader.int32() as any; + break; + case 2: + message.packed = reader.bool(); + break; + case 6: + message.jstype = reader.int32() as any; + break; + case 5: + message.lazy = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 10: + message.weak = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldOptions { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? Boolean(object.lazy) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? Boolean(object.weak) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FieldOptions): unknown { + const obj: any = {}; + message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype)); + message.packed !== undefined && (obj.packed = message.packed); + message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype)); + message.lazy !== undefined && (obj.lazy = message.lazy); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.weak !== undefined && (obj.weak = message.weak); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FieldOptions { + const message = createBaseFieldOptions(); + message.ctype = object.ctype ?? 0; + message.packed = object.packed ?? false; + message.jstype = object.jstype ?? 0; + message.lazy = object.lazy ?? false; + message.deprecated = object.deprecated ?? false; + message.weak = object.weak ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseOneofOptions(): OneofOptions { + return { uninterpretedOption: [] }; +} + +export const OneofOptions = { + encode(message: OneofOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: OneofOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): OneofOptions { + const message = createBaseOneofOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumOptions(): EnumOptions { + return { allowAlias: false, deprecated: false, uninterpretedOption: [] }; +} + +export const EnumOptions = { + encode(message: EnumOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.allowAlias === true) { + writer.uint32(16).bool(message.allowAlias); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.allowAlias = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumOptions { + return { + allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumOptions): unknown { + const obj: any = {}; + message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumOptions { + const message = createBaseEnumOptions(); + message.allowAlias = object.allowAlias ?? false; + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumValueOptions(): EnumValueOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const EnumValueOptions = { + encode(message: EnumValueOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(8).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumValueOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumValueOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumValueOptions { + const message = createBaseEnumValueOptions(); + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseServiceOptions(): ServiceOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const ServiceOptions = { + encode(message: ServiceOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceOptions { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ServiceOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ServiceOptions { + const message = createBaseServiceOptions(); + message.deprecated = object.deprecated ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMethodOptions(): MethodOptions { + return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] }; +} + +export const MethodOptions = { + encode(message: MethodOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + if (message.idempotencyLevel !== 0) { + writer.uint32(272).int32(message.idempotencyLevel); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 34: + message.idempotencyLevel = reader.int32() as any; + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : 0, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MethodOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.idempotencyLevel !== undefined + && (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel)); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MethodOptions { + const message = createBaseMethodOptions(); + message.deprecated = object.deprecated ?? false; + message.idempotencyLevel = object.idempotencyLevel ?? 0; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseUninterpretedOption(): UninterpretedOption { + return { + name: [], + identifierValue: "", + positiveIntValue: 0, + negativeIntValue: 0, + doubleValue: 0, + stringValue: new Uint8Array(), + aggregateValue: "", + }; +} + +export const UninterpretedOption = { + encode(message: UninterpretedOption, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.name) { + UninterpretedOption_NamePart.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.identifierValue !== "") { + writer.uint32(26).string(message.identifierValue); + } + if (message.positiveIntValue !== 0) { + writer.uint32(32).uint64(message.positiveIntValue); + } + if (message.negativeIntValue !== 0) { + writer.uint32(40).int64(message.negativeIntValue); + } + if (message.doubleValue !== 0) { + writer.uint32(49).double(message.doubleValue); + } + if (message.stringValue.length !== 0) { + writer.uint32(58).bytes(message.stringValue); + } + if (message.aggregateValue !== "") { + writer.uint32(66).string(message.aggregateValue); + } + 
return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.name.push(UninterpretedOption_NamePart.decode(reader, reader.uint32())); + break; + case 3: + message.identifierValue = reader.string(); + break; + case 4: + message.positiveIntValue = longToNumber(reader.uint64() as Long); + break; + case 5: + message.negativeIntValue = longToNumber(reader.int64() as Long); + break; + case 6: + message.doubleValue = reader.double(); + break; + case 7: + message.stringValue = reader.bytes(); + break; + case 8: + message.aggregateValue = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption { + return { + name: Array.isArray(object?.name) ? object.name.map((e: any) => UninterpretedOption_NamePart.fromJSON(e)) : [], + identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "", + positiveIntValue: isSet(object.positiveIntValue) ? Number(object.positiveIntValue) : 0, + negativeIntValue: isSet(object.negativeIntValue) ? Number(object.negativeIntValue) : 0, + doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) ? bytesFromBase64(object.stringValue) : new Uint8Array(), + aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "", + }; + }, + + toJSON(message: UninterpretedOption): unknown { + const obj: any = {}; + if (message.name) { + obj.name = message.name.map((e) => e ? 
UninterpretedOption_NamePart.toJSON(e) : undefined); + } else { + obj.name = []; + } + message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue); + message.positiveIntValue !== undefined && (obj.positiveIntValue = Math.round(message.positiveIntValue)); + message.negativeIntValue !== undefined && (obj.negativeIntValue = Math.round(message.negativeIntValue)); + message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue); + message.stringValue !== undefined + && (obj.stringValue = base64FromBytes( + message.stringValue !== undefined ? message.stringValue : new Uint8Array(), + )); + message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption { + const message = createBaseUninterpretedOption(); + message.name = object.name?.map((e) => UninterpretedOption_NamePart.fromPartial(e)) || []; + message.identifierValue = object.identifierValue ?? ""; + message.positiveIntValue = object.positiveIntValue ?? 0; + message.negativeIntValue = object.negativeIntValue ?? 0; + message.doubleValue = object.doubleValue ?? 0; + message.stringValue = object.stringValue ?? new Uint8Array(); + message.aggregateValue = object.aggregateValue ?? ""; + return message; + }, +}; + +function createBaseUninterpretedOption_NamePart(): UninterpretedOption_NamePart { + return { namePart: "", isExtension: false }; +} + +export const UninterpretedOption_NamePart = { + encode(message: UninterpretedOption_NamePart, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.namePart !== "") { + writer.uint32(10).string(message.namePart); + } + if (message.isExtension === true) { + writer.uint32(16).bool(message.isExtension); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption_NamePart { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseUninterpretedOption_NamePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.namePart = reader.string(); + break; + case 2: + message.isExtension = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption_NamePart { + return { + namePart: isSet(object.namePart) ? String(object.namePart) : "", + isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false, + }; + }, + + toJSON(message: UninterpretedOption_NamePart): unknown { + const obj: any = {}; + message.namePart !== undefined && (obj.namePart = message.namePart); + message.isExtension !== undefined && (obj.isExtension = message.isExtension); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart(); + message.namePart = object.namePart ?? ""; + message.isExtension = object.isExtension ?? false; + return message; + }, +}; + +function createBaseSourceCodeInfo(): SourceCodeInfo { + return { location: [] }; +} + +export const SourceCodeInfo = { + encode(message: SourceCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.location) { + SourceCodeInfo_Location.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.location.push(SourceCodeInfo_Location.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo { + return { + location: Array.isArray(object?.location) + ? object.location.map((e: any) => SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo): unknown { + const obj: any = {}; + if (message.location) { + obj.location = message.location.map((e) => e ? SourceCodeInfo_Location.toJSON(e) : undefined); + } else { + obj.location = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo { + const message = createBaseSourceCodeInfo(); + message.location = object.location?.map((e) => SourceCodeInfo_Location.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSourceCodeInfo_Location(): SourceCodeInfo_Location { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} + +export const SourceCodeInfo_Location = { + encode(message: SourceCodeInfo_Location, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(18).fork(); + for (const v of message.span) { + writer.int32(v); + } + writer.ldelim(); + if (message.leadingComments !== "") { + writer.uint32(26).string(message.leadingComments); + } + if (message.trailingComments !== "") { + writer.uint32(34).string(message.trailingComments); + } + for (const v of message.leadingDetachedComments) { + writer.uint32(50).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo_Location { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo_Location(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.span.push(reader.int32()); + } + } else { + message.span.push(reader.int32()); + } + break; + case 3: + message.leadingComments = reader.string(); + break; + case 4: + message.trailingComments = reader.string(); + break; + case 6: + message.leadingDetachedComments.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo_Location { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + span: Array.isArray(object?.span) ? object.span.map((e: any) => Number(e)) : [], + leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "", + trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "", + leadingDetachedComments: Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e: any) => String(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo_Location): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + if (message.span) { + obj.span = message.span.map((e) => Math.round(e)); + } else { + obj.span = []; + } + message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments); + message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments); + if (message.leadingDetachedComments) { + obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e); + } else { + obj.leadingDetachedComments = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location(); + message.path = object.path?.map((e) => e) || []; + message.span = object.span?.map((e) => e) || []; + message.leadingComments = object.leadingComments ?? ""; + message.trailingComments = object.trailingComments ?? ""; + message.leadingDetachedComments = object.leadingDetachedComments?.map((e) => e) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo(): GeneratedCodeInfo { + return { annotation: [] }; +} + +export const GeneratedCodeInfo = { + encode(message: GeneratedCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.annotation) { + GeneratedCodeInfo_Annotation.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.annotation.push(GeneratedCodeInfo_Annotation.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo { + return { + annotation: Array.isArray(object?.annotation) + ? object.annotation.map((e: any) => GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GeneratedCodeInfo): unknown { + const obj: any = {}; + if (message.annotation) { + obj.annotation = message.annotation.map((e) => e ? GeneratedCodeInfo_Annotation.toJSON(e) : undefined); + } else { + obj.annotation = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo(); + message.annotation = object.annotation?.map((e) => GeneratedCodeInfo_Annotation.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo_Annotation(): GeneratedCodeInfo_Annotation { + return { path: [], sourceFile: "", begin: 0, end: 0 }; +} + +export const GeneratedCodeInfo_Annotation = { + encode(message: GeneratedCodeInfo_Annotation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + if (message.sourceFile !== "") { + writer.uint32(18).string(message.sourceFile); + } + if (message.begin !== 0) { + writer.uint32(24).int32(message.begin); + } + if (message.end !== 0) { + writer.uint32(32).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo_Annotation { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo_Annotation(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + message.sourceFile = reader.string(); + break; + case 3: + message.begin = reader.int32(); + break; + case 4: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo_Annotation { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "", + begin: isSet(object.begin) ? Number(object.begin) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + }; + }, + + toJSON(message: GeneratedCodeInfo_Annotation): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile); + message.begin !== undefined && (obj.begin = Math.round(message.begin)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation(); + message.path = object.path?.map((e) => e) || []; + message.sourceFile = object.sourceFile ?? ""; + message.begin = object.begin ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.consensus.v1/types/google/protobuf/duration.ts b/ts-client/cosmos.consensus.v1/types/google/protobuf/duration.ts new file mode 100644 index 000000000..70ce816b7 --- /dev/null +++ b/ts-client/cosmos.consensus.v1/types/google/protobuf/duration.ts @@ -0,0 +1,187 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * A Duration represents a signed, fixed-length span of time represented + * as a count of seconds and fractions of seconds at nanosecond + * resolution. It is independent of any calendar and concepts like "day" + * or "month". It is related to Timestamp in that the difference between + * two Timestamp values is a Duration and it can be added or subtracted + * from a Timestamp. Range is approximately +-10,000 years. + * + * # Examples + * + * Example 1: Compute Duration from two Timestamps in pseudo code. + * + * Timestamp start = ...; + * Timestamp end = ...; + * Duration duration = ...; + * + * duration.seconds = end.seconds - start.seconds; + * duration.nanos = end.nanos - start.nanos; + * + * if (duration.seconds < 0 && duration.nanos > 0) { + * duration.seconds += 1; + * duration.nanos -= 1000000000; + * } else if (duration.seconds > 0 && duration.nanos < 0) { + * duration.seconds -= 1; + * duration.nanos += 1000000000; + * } + * + * Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. 
+ * + * Timestamp start = ...; + * Duration duration = ...; + * Timestamp end = ...; + * + * end.seconds = start.seconds + duration.seconds; + * end.nanos = start.nanos + duration.nanos; + * + * if (end.nanos < 0) { + * end.seconds -= 1; + * end.nanos += 1000000000; + * } else if (end.nanos >= 1000000000) { + * end.seconds += 1; + * end.nanos -= 1000000000; + * } + * + * Example 3: Compute Duration from datetime.timedelta in Python. + * + * td = datetime.timedelta(days=3, minutes=10) + * duration = Duration() + * duration.FromTimedelta(td) + * + * # JSON Mapping + * + * In JSON format, the Duration type is encoded as a string rather than an + * object, where the string ends in the suffix "s" (indicating seconds) and + * is preceded by the number of seconds, with nanoseconds expressed as + * fractional seconds. For example, 3 seconds with 0 nanoseconds should be + * encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should + * be expressed in JSON format as "3.000000001s", and 3 seconds and 1 + * microsecond should be expressed in JSON format as "3.000001s". + */ +export interface Duration { + /** + * Signed seconds of the span of time. Must be from -315,576,000,000 + * to +315,576,000,000 inclusive. Note: these bounds are computed from: + * 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + */ + seconds: number; + /** + * Signed fractions of a second at nanosecond resolution of the span + * of time. Durations less than one second are represented with a 0 + * `seconds` field and a positive or negative `nanos` field. For durations + * of one second or more, a non-zero value for the `nanos` field must be + * of the same sign as the `seconds` field. Must be from -999,999,999 + * to +999,999,999 inclusive. 
+ */ + nanos: number; +} + +function createBaseDuration(): Duration { + return { seconds: 0, nanos: 0 }; +} + +export const Duration = { + encode(message: Duration, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.seconds !== 0) { + writer.uint32(8).int64(message.seconds); + } + if (message.nanos !== 0) { + writer.uint32(16).int32(message.nanos); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Duration { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDuration(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.seconds = longToNumber(reader.int64() as Long); + break; + case 2: + message.nanos = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Duration { + return { + seconds: isSet(object.seconds) ? Number(object.seconds) : 0, + nanos: isSet(object.nanos) ? Number(object.nanos) : 0, + }; + }, + + toJSON(message: Duration): unknown { + const obj: any = {}; + message.seconds !== undefined && (obj.seconds = Math.round(message.seconds)); + message.nanos !== undefined && (obj.nanos = Math.round(message.nanos)); + return obj; + }, + + fromPartial, I>>(object: I): Duration { + const message = createBaseDuration(); + message.seconds = object.seconds ?? 0; + message.nanos = object.nanos ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.consensus.v1/types/tendermint/types/params.ts b/ts-client/cosmos.consensus.v1/types/tendermint/types/params.ts new file mode 100644 index 000000000..56ca7eaba --- /dev/null +++ b/ts-client/cosmos.consensus.v1/types/tendermint/types/params.ts @@ -0,0 +1,498 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { Duration } from "../../google/protobuf/duration"; + +export const protobufPackage = "tendermint.types"; + +/** + * ConsensusParams contains consensus critical parameters that determine the + * validity of blocks. 
+ */ +export interface ConsensusParams { + block: BlockParams | undefined; + evidence: EvidenceParams | undefined; + validator: ValidatorParams | undefined; + version: VersionParams | undefined; +} + +/** BlockParams contains limits on the block size. */ +export interface BlockParams { + /** + * Max block size, in bytes. + * Note: must be greater than 0 + */ + maxBytes: number; + /** + * Max gas per block. + * Note: must be greater or equal to -1 + */ + maxGas: number; +} + +/** EvidenceParams determine how we handle evidence of malfeasance. */ +export interface EvidenceParams { + /** + * Max age of evidence, in blocks. + * + * The basic formula for calculating this is: MaxAgeDuration / {average block + * time}. + */ + maxAgeNumBlocks: number; + /** + * Max age of evidence, in time. + * + * It should correspond with an app's "unbonding period" or other similar + * mechanism for handling [Nothing-At-Stake + * attacks](https://github.com/ethereum/wiki/wiki/Proof-of-Stake-FAQ#what-is-the-nothing-at-stake-problem-and-how-can-it-be-fixed). + */ + maxAgeDuration: + | Duration + | undefined; + /** + * This sets the maximum size of total evidence in bytes that can be committed in a single block. + * and should fall comfortably under the max block bytes. + * Default is 1048576 or 1MB + */ + maxBytes: number; +} + +/** + * ValidatorParams restrict the public key types validators can use. + * NOTE: uses ABCI pubkey naming, not Amino names. + */ +export interface ValidatorParams { + pubKeyTypes: string[]; +} + +/** VersionParams contains the ABCI application version. */ +export interface VersionParams { + app: number; +} + +/** + * HashedParams is a subset of ConsensusParams. + * + * It is hashed into the Header.ConsensusHash. 
+ */ +export interface HashedParams { + blockMaxBytes: number; + blockMaxGas: number; +} + +function createBaseConsensusParams(): ConsensusParams { + return { block: undefined, evidence: undefined, validator: undefined, version: undefined }; +} + +export const ConsensusParams = { + encode(message: ConsensusParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.block !== undefined) { + BlockParams.encode(message.block, writer.uint32(10).fork()).ldelim(); + } + if (message.evidence !== undefined) { + EvidenceParams.encode(message.evidence, writer.uint32(18).fork()).ldelim(); + } + if (message.validator !== undefined) { + ValidatorParams.encode(message.validator, writer.uint32(26).fork()).ldelim(); + } + if (message.version !== undefined) { + VersionParams.encode(message.version, writer.uint32(34).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ConsensusParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseConsensusParams(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.block = BlockParams.decode(reader, reader.uint32()); + break; + case 2: + message.evidence = EvidenceParams.decode(reader, reader.uint32()); + break; + case 3: + message.validator = ValidatorParams.decode(reader, reader.uint32()); + break; + case 4: + message.version = VersionParams.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ConsensusParams { + return { + block: isSet(object.block) ? BlockParams.fromJSON(object.block) : undefined, + evidence: isSet(object.evidence) ? EvidenceParams.fromJSON(object.evidence) : undefined, + validator: isSet(object.validator) ? 
ValidatorParams.fromJSON(object.validator) : undefined, + version: isSet(object.version) ? VersionParams.fromJSON(object.version) : undefined, + }; + }, + + toJSON(message: ConsensusParams): unknown { + const obj: any = {}; + message.block !== undefined && (obj.block = message.block ? BlockParams.toJSON(message.block) : undefined); + message.evidence !== undefined + && (obj.evidence = message.evidence ? EvidenceParams.toJSON(message.evidence) : undefined); + message.validator !== undefined + && (obj.validator = message.validator ? ValidatorParams.toJSON(message.validator) : undefined); + message.version !== undefined + && (obj.version = message.version ? VersionParams.toJSON(message.version) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): ConsensusParams { + const message = createBaseConsensusParams(); + message.block = (object.block !== undefined && object.block !== null) + ? BlockParams.fromPartial(object.block) + : undefined; + message.evidence = (object.evidence !== undefined && object.evidence !== null) + ? EvidenceParams.fromPartial(object.evidence) + : undefined; + message.validator = (object.validator !== undefined && object.validator !== null) + ? ValidatorParams.fromPartial(object.validator) + : undefined; + message.version = (object.version !== undefined && object.version !== null) + ? VersionParams.fromPartial(object.version) + : undefined; + return message; + }, +}; + +function createBaseBlockParams(): BlockParams { + return { maxBytes: 0, maxGas: 0 }; +} + +export const BlockParams = { + encode(message: BlockParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.maxBytes !== 0) { + writer.uint32(8).int64(message.maxBytes); + } + if (message.maxGas !== 0) { + writer.uint32(16).int64(message.maxGas); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BlockParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseBlockParams(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.maxBytes = longToNumber(reader.int64() as Long); + break; + case 2: + message.maxGas = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): BlockParams { + return { + maxBytes: isSet(object.maxBytes) ? Number(object.maxBytes) : 0, + maxGas: isSet(object.maxGas) ? Number(object.maxGas) : 0, + }; + }, + + toJSON(message: BlockParams): unknown { + const obj: any = {}; + message.maxBytes !== undefined && (obj.maxBytes = Math.round(message.maxBytes)); + message.maxGas !== undefined && (obj.maxGas = Math.round(message.maxGas)); + return obj; + }, + + fromPartial, I>>(object: I): BlockParams { + const message = createBaseBlockParams(); + message.maxBytes = object.maxBytes ?? 0; + message.maxGas = object.maxGas ?? 0; + return message; + }, +}; + +function createBaseEvidenceParams(): EvidenceParams { + return { maxAgeNumBlocks: 0, maxAgeDuration: undefined, maxBytes: 0 }; +} + +export const EvidenceParams = { + encode(message: EvidenceParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.maxAgeNumBlocks !== 0) { + writer.uint32(8).int64(message.maxAgeNumBlocks); + } + if (message.maxAgeDuration !== undefined) { + Duration.encode(message.maxAgeDuration, writer.uint32(18).fork()).ldelim(); + } + if (message.maxBytes !== 0) { + writer.uint32(24).int64(message.maxBytes); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EvidenceParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEvidenceParams(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.maxAgeNumBlocks = longToNumber(reader.int64() as Long); + break; + case 2: + message.maxAgeDuration = Duration.decode(reader, reader.uint32()); + break; + case 3: + message.maxBytes = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EvidenceParams { + return { + maxAgeNumBlocks: isSet(object.maxAgeNumBlocks) ? Number(object.maxAgeNumBlocks) : 0, + maxAgeDuration: isSet(object.maxAgeDuration) ? Duration.fromJSON(object.maxAgeDuration) : undefined, + maxBytes: isSet(object.maxBytes) ? Number(object.maxBytes) : 0, + }; + }, + + toJSON(message: EvidenceParams): unknown { + const obj: any = {}; + message.maxAgeNumBlocks !== undefined && (obj.maxAgeNumBlocks = Math.round(message.maxAgeNumBlocks)); + message.maxAgeDuration !== undefined + && (obj.maxAgeDuration = message.maxAgeDuration ? Duration.toJSON(message.maxAgeDuration) : undefined); + message.maxBytes !== undefined && (obj.maxBytes = Math.round(message.maxBytes)); + return obj; + }, + + fromPartial, I>>(object: I): EvidenceParams { + const message = createBaseEvidenceParams(); + message.maxAgeNumBlocks = object.maxAgeNumBlocks ?? 0; + message.maxAgeDuration = (object.maxAgeDuration !== undefined && object.maxAgeDuration !== null) + ? Duration.fromPartial(object.maxAgeDuration) + : undefined; + message.maxBytes = object.maxBytes ?? 
0; + return message; + }, +}; + +function createBaseValidatorParams(): ValidatorParams { + return { pubKeyTypes: [] }; +} + +export const ValidatorParams = { + encode(message: ValidatorParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.pubKeyTypes) { + writer.uint32(10).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidatorParams(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pubKeyTypes.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ValidatorParams { + return { pubKeyTypes: Array.isArray(object?.pubKeyTypes) ? object.pubKeyTypes.map((e: any) => String(e)) : [] }; + }, + + toJSON(message: ValidatorParams): unknown { + const obj: any = {}; + if (message.pubKeyTypes) { + obj.pubKeyTypes = message.pubKeyTypes.map((e) => e); + } else { + obj.pubKeyTypes = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ValidatorParams { + const message = createBaseValidatorParams(); + message.pubKeyTypes = object.pubKeyTypes?.map((e) => e) || []; + return message; + }, +}; + +function createBaseVersionParams(): VersionParams { + return { app: 0 }; +} + +export const VersionParams = { + encode(message: VersionParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.app !== 0) { + writer.uint32(8).uint64(message.app); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): VersionParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseVersionParams(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.app = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): VersionParams { + return { app: isSet(object.app) ? Number(object.app) : 0 }; + }, + + toJSON(message: VersionParams): unknown { + const obj: any = {}; + message.app !== undefined && (obj.app = Math.round(message.app)); + return obj; + }, + + fromPartial, I>>(object: I): VersionParams { + const message = createBaseVersionParams(); + message.app = object.app ?? 0; + return message; + }, +}; + +function createBaseHashedParams(): HashedParams { + return { blockMaxBytes: 0, blockMaxGas: 0 }; +} + +export const HashedParams = { + encode(message: HashedParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.blockMaxBytes !== 0) { + writer.uint32(8).int64(message.blockMaxBytes); + } + if (message.blockMaxGas !== 0) { + writer.uint32(16).int64(message.blockMaxGas); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): HashedParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHashedParams(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.blockMaxBytes = longToNumber(reader.int64() as Long); + break; + case 2: + message.blockMaxGas = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): HashedParams { + return { + blockMaxBytes: isSet(object.blockMaxBytes) ? Number(object.blockMaxBytes) : 0, + blockMaxGas: isSet(object.blockMaxGas) ? 
Number(object.blockMaxGas) : 0, + }; + }, + + toJSON(message: HashedParams): unknown { + const obj: any = {}; + message.blockMaxBytes !== undefined && (obj.blockMaxBytes = Math.round(message.blockMaxBytes)); + message.blockMaxGas !== undefined && (obj.blockMaxGas = Math.round(message.blockMaxGas)); + return obj; + }, + + fromPartial, I>>(object: I): HashedParams { + const message = createBaseHashedParams(); + message.blockMaxBytes = object.blockMaxBytes ?? 0; + message.blockMaxGas = object.blockMaxGas ?? 0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.mint.v1beta1/api.swagger.yml b/ts-client/cosmos.mint.v1beta1/api.swagger.yml new file mode 100644 index 000000000..7e545d1ba --- /dev/null +++ b/ts-client/cosmos.mint.v1beta1/api.swagger.yml @@ -0,0 +1,203 @@ +swagger: '2.0' +info: + title: HTTP API Console cosmos.mint.v1beta1 + name: '' + description: '' +paths: + /cosmos/mint/v1beta1/annual_provisions: + get: + operationId: AnnualProvisions + responses: + '200': + description: A successful response. + schema: + type: object + properties: + annual_provisions: + type: string + format: byte + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + tags: + - Query + /cosmos/mint/v1beta1/inflation: + get: + operationId: Inflation + responses: + '200': + description: A successful response. + schema: + type: object + properties: + inflation: + type: string + format: byte + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + tags: + - Query + /cosmos/mint/v1beta1/params: + get: + operationId: Params + responses: + '200': + description: A successful response. 
+ schema: + type: object + properties: + params: + type: object + properties: + mint_denom: + type: string + inflation_rate_change: + type: string + inflation_max: + type: string + inflation_min: + type: string + goal_bonded: + type: string + blocks_per_year: + type: string + format: uint64 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + tags: + - Query +definitions: + Any: + type: object + properties: + '@type': + type: string + additionalProperties: {} + Status: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + QueryAnnualProvisionsResponse: + type: object + properties: + annual_provisions: + type: string + format: byte + QueryInflationResponse: + type: object + properties: + inflation: + type: string + format: byte + QueryParamsResponse: + type: object + properties: + params: + type: object + properties: + mint_denom: + type: string + inflation_rate_change: + type: string + inflation_max: + type: string + inflation_min: + type: string + goal_bonded: + type: string + blocks_per_year: + type: string + format: uint64 + v1beta1.Params: + type: object + properties: + mint_denom: + type: string + inflation_rate_change: + type: string + inflation_max: + type: string + inflation_min: + type: string + goal_bonded: + type: string + blocks_per_year: + type: string + format: uint64 + MsgUpdateParamsResponse: + type: object + Params: + type: object + properties: + mint_denom: + type: string + inflation_rate_change: + type: string + inflation_max: + type: string + inflation_min: + type: string + goal_bonded: + type: string + blocks_per_year: + type: string + format: uint64 diff --git 
a/ts-client/cosmos.mint.v1beta1/index.ts b/ts-client/cosmos.mint.v1beta1/index.ts new file mode 100755 index 000000000..11faba741 --- /dev/null +++ b/ts-client/cosmos.mint.v1beta1/index.ts @@ -0,0 +1,6 @@ +import IgntModule from './module'; +import { txClient, queryClient, registry } from './module'; +import { msgTypes } from './registry'; + +export * from "./types"; +export { IgntModule, msgTypes, txClient, queryClient, registry }; diff --git a/ts-client/cosmos.mint.v1beta1/module.ts b/ts-client/cosmos.mint.v1beta1/module.ts new file mode 100755 index 000000000..f677e08ab --- /dev/null +++ b/ts-client/cosmos.mint.v1beta1/module.ts @@ -0,0 +1,458 @@ +// Generated by Ignite ignite.com/cli + +import { SigningStargateClient, DeliverTxResponse, StdFee } from "@cosmjs/stargate"; +import { EncodeObject, GeneratedType, OfflineSigner, Registry } from "@cosmjs/proto-signing"; +import { msgTypes } from './registry'; +import { IgniteClient } from "../client" +import { MissingWalletError } from "../helpers" +import { Api } from "./rest"; +import { QueryAnnualProvisionsResponse } from "./types/cosmos/mint/v1beta1/query"; +import { Minter } from "./types/cosmos/mint/v1beta1/mint"; +import { QueryInflationResponse } from "./types/cosmos/mint/v1beta1/query"; +import { QueryParamsRequest } from "./types/cosmos/mint/v1beta1/query"; +import { QueryParamsResponse } from "./types/cosmos/mint/v1beta1/query"; +import { QueryInflationRequest } from "./types/cosmos/mint/v1beta1/query"; +import { MsgUpdateParamsResponse } from "./types/cosmos/mint/v1beta1/tx"; +import { MsgUpdateParams } from "./types/cosmos/mint/v1beta1/tx"; +import { QueryAnnualProvisionsRequest } from "./types/cosmos/mint/v1beta1/query"; +import { GenesisState } from "./types/cosmos/mint/v1beta1/genesis"; +import { Params } from "./types/cosmos/mint/v1beta1/mint"; + + +export { QueryAnnualProvisionsResponse, Minter, QueryInflationResponse, QueryParamsRequest, QueryParamsResponse, QueryInflationRequest, 
MsgUpdateParamsResponse, MsgUpdateParams, QueryAnnualProvisionsRequest, GenesisState, Params }; + +type sendQueryAnnualProvisionsResponseParams = { + value: QueryAnnualProvisionsResponse, + fee?: StdFee, + memo?: string +}; + +type sendMinterParams = { + value: Minter, + fee?: StdFee, + memo?: string +}; + +type sendQueryInflationResponseParams = { + value: QueryInflationResponse, + fee?: StdFee, + memo?: string +}; + +type sendQueryParamsRequestParams = { + value: QueryParamsRequest, + fee?: StdFee, + memo?: string +}; + +type sendQueryParamsResponseParams = { + value: QueryParamsResponse, + fee?: StdFee, + memo?: string +}; + +type sendQueryInflationRequestParams = { + value: QueryInflationRequest, + fee?: StdFee, + memo?: string +}; + +type sendMsgUpdateParamsResponseParams = { + value: MsgUpdateParamsResponse, + fee?: StdFee, + memo?: string +}; + +type sendMsgUpdateParamsParams = { + value: MsgUpdateParams, + fee?: StdFee, + memo?: string +}; + +type sendQueryAnnualProvisionsRequestParams = { + value: QueryAnnualProvisionsRequest, + fee?: StdFee, + memo?: string +}; + +type sendGenesisStateParams = { + value: GenesisState, + fee?: StdFee, + memo?: string +}; + +type sendParamsParams = { + value: Params, + fee?: StdFee, + memo?: string +}; + + +type queryAnnualProvisionsResponseParams = { + value: QueryAnnualProvisionsResponse, +}; + +type minterParams = { + value: Minter, +}; + +type queryInflationResponseParams = { + value: QueryInflationResponse, +}; + +type queryParamsRequestParams = { + value: QueryParamsRequest, +}; + +type queryParamsResponseParams = { + value: QueryParamsResponse, +}; + +type queryInflationRequestParams = { + value: QueryInflationRequest, +}; + +type msgUpdateParamsResponseParams = { + value: MsgUpdateParamsResponse, +}; + +type msgUpdateParamsParams = { + value: MsgUpdateParams, +}; + +type queryAnnualProvisionsRequestParams = { + value: QueryAnnualProvisionsRequest, +}; + +type genesisStateParams = { + value: GenesisState, +}; + +type 
paramsParams = { + value: Params, +}; + + +export const registry = new Registry(msgTypes); + +type Field = { + name: string; + type: unknown; +} +function getStructure(template) { + const structure: {fields: Field[]} = { fields: [] } + for (let [key, value] of Object.entries(template)) { + let field = { name: key, type: typeof value } + structure.fields.push(field) + } + return structure +} +const defaultFee = { + amount: [], + gas: "200000", +}; + +interface TxClientOptions { + addr: string + prefix: string + signer?: OfflineSigner +} + +export const txClient = ({ signer, prefix, addr }: TxClientOptions = { addr: "http://localhost:26657", prefix: "cosmos" }) => { + + return { + + async sendQueryAnnualProvisionsResponse({ value, fee, memo }: sendQueryAnnualProvisionsResponseParams): Promise { + if (!signer) { + throw new Error('TxClient:sendQueryAnnualProvisionsResponse: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry}); + let msg = this.queryAnnualProvisionsResponse({ value: QueryAnnualProvisionsResponse.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendQueryAnnualProvisionsResponse: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMinter({ value, fee, memo }: sendMinterParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMinter: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry}); + let msg = this.minter({ value: Minter.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? 
fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMinter: Could not broadcast Tx: '+ e.message) + } + }, + + async sendQueryInflationResponse({ value, fee, memo }: sendQueryInflationResponseParams): Promise { + if (!signer) { + throw new Error('TxClient:sendQueryInflationResponse: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry}); + let msg = this.queryInflationResponse({ value: QueryInflationResponse.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendQueryInflationResponse: Could not broadcast Tx: '+ e.message) + } + }, + + async sendQueryParamsRequest({ value, fee, memo }: sendQueryParamsRequestParams): Promise { + if (!signer) { + throw new Error('TxClient:sendQueryParamsRequest: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry}); + let msg = this.queryParamsRequest({ value: QueryParamsRequest.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendQueryParamsRequest: Could not broadcast Tx: '+ e.message) + } + }, + + async sendQueryParamsResponse({ value, fee, memo }: sendQueryParamsResponseParams): Promise { + if (!signer) { + throw new Error('TxClient:sendQueryParamsResponse: Unable to sign Tx. 
Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry}); + let msg = this.queryParamsResponse({ value: QueryParamsResponse.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendQueryParamsResponse: Could not broadcast Tx: '+ e.message) + } + }, + + async sendQueryInflationRequest({ value, fee, memo }: sendQueryInflationRequestParams): Promise { + if (!signer) { + throw new Error('TxClient:sendQueryInflationRequest: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry}); + let msg = this.queryInflationRequest({ value: QueryInflationRequest.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendQueryInflationRequest: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgUpdateParamsResponse({ value, fee, memo }: sendMsgUpdateParamsResponseParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgUpdateParamsResponse: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry}); + let msg = this.msgUpdateParamsResponse({ value: MsgUpdateParamsResponse.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? 
fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgUpdateParamsResponse: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgUpdateParams({ value, fee, memo }: sendMsgUpdateParamsParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgUpdateParams: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry}); + let msg = this.msgUpdateParams({ value: MsgUpdateParams.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgUpdateParams: Could not broadcast Tx: '+ e.message) + } + }, + + async sendQueryAnnualProvisionsRequest({ value, fee, memo }: sendQueryAnnualProvisionsRequestParams): Promise { + if (!signer) { + throw new Error('TxClient:sendQueryAnnualProvisionsRequest: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry}); + let msg = this.queryAnnualProvisionsRequest({ value: QueryAnnualProvisionsRequest.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendQueryAnnualProvisionsRequest: Could not broadcast Tx: '+ e.message) + } + }, + + async sendGenesisState({ value, fee, memo }: sendGenesisStateParams): Promise { + if (!signer) { + throw new Error('TxClient:sendGenesisState: Unable to sign Tx. 
Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry}); + let msg = this.genesisState({ value: GenesisState.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendGenesisState: Could not broadcast Tx: '+ e.message) + } + }, + + async sendParams({ value, fee, memo }: sendParamsParams): Promise { + if (!signer) { + throw new Error('TxClient:sendParams: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry}); + let msg = this.params({ value: Params.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendParams: Could not broadcast Tx: '+ e.message) + } + }, + + + queryAnnualProvisionsResponse({ value }: queryAnnualProvisionsResponseParams): EncodeObject { + try { + return { typeUrl: "/cosmos.mint.v1beta1.QueryAnnualProvisionsResponse", value: QueryAnnualProvisionsResponse.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:QueryAnnualProvisionsResponse: Could not create message: ' + e.message) + } + }, + + minter({ value }: minterParams): EncodeObject { + try { + return { typeUrl: "/cosmos.mint.v1beta1.Minter", value: Minter.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:Minter: Could not create message: ' + e.message) + } + }, + + queryInflationResponse({ value }: queryInflationResponseParams): EncodeObject { + try { + return { typeUrl: "/cosmos.mint.v1beta1.QueryInflationResponse", value: QueryInflationResponse.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:QueryInflationResponse: Could not create 
message: ' + e.message) + } + }, + + queryParamsRequest({ value }: queryParamsRequestParams): EncodeObject { + try { + return { typeUrl: "/cosmos.mint.v1beta1.QueryParamsRequest", value: QueryParamsRequest.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:QueryParamsRequest: Could not create message: ' + e.message) + } + }, + + queryParamsResponse({ value }: queryParamsResponseParams): EncodeObject { + try { + return { typeUrl: "/cosmos.mint.v1beta1.QueryParamsResponse", value: QueryParamsResponse.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:QueryParamsResponse: Could not create message: ' + e.message) + } + }, + + queryInflationRequest({ value }: queryInflationRequestParams): EncodeObject { + try { + return { typeUrl: "/cosmos.mint.v1beta1.QueryInflationRequest", value: QueryInflationRequest.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:QueryInflationRequest: Could not create message: ' + e.message) + } + }, + + msgUpdateParamsResponse({ value }: msgUpdateParamsResponseParams): EncodeObject { + try { + return { typeUrl: "/cosmos.mint.v1beta1.MsgUpdateParamsResponse", value: MsgUpdateParamsResponse.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgUpdateParamsResponse: Could not create message: ' + e.message) + } + }, + + msgUpdateParams({ value }: msgUpdateParamsParams): EncodeObject { + try { + return { typeUrl: "/cosmos.mint.v1beta1.MsgUpdateParams", value: MsgUpdateParams.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgUpdateParams: Could not create message: ' + e.message) + } + }, + + queryAnnualProvisionsRequest({ value }: queryAnnualProvisionsRequestParams): EncodeObject { + try { + return { typeUrl: "/cosmos.mint.v1beta1.QueryAnnualProvisionsRequest", value: QueryAnnualProvisionsRequest.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:QueryAnnualProvisionsRequest: Could not create message: ' + e.message) + } + 
}, + + genesisState({ value }: genesisStateParams): EncodeObject { + try { + return { typeUrl: "/cosmos.mint.v1beta1.GenesisState", value: GenesisState.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:GenesisState: Could not create message: ' + e.message) + } + }, + + params({ value }: paramsParams): EncodeObject { + try { + return { typeUrl: "/cosmos.mint.v1beta1.Params", value: Params.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:Params: Could not create message: ' + e.message) + } + }, + + } +}; + +interface QueryClientOptions { + addr: string +} + +export const queryClient = ({ addr: addr }: QueryClientOptions = { addr: "http://localhost:1317" }) => { + return new Api({ baseURL: addr }); +}; + +class SDKModule { + public query: ReturnType; + public tx: ReturnType; + public structure: Record; + public registry: Array<[string, GeneratedType]> = []; + + constructor(client: IgniteClient) { + + this.query = queryClient({ addr: client.env.apiURL }); + this.updateTX(client); + this.structure = { + + }; + client.on('signer-changed',(signer) => { + this.updateTX(client); + }) + } + updateTX(client: IgniteClient) { + const methods = txClient({ + signer: client.signer, + addr: client.env.rpcURL, + prefix: client.env.prefix ?? 
"cosmos", + }) + + this.tx = methods; + for (let m in methods) { + this.tx[m] = methods[m].bind(this.tx); + } + } +}; + +const IgntModule = (test: IgniteClient) => { + return { + module: { + CosmosMintV1Beta1: new SDKModule(test) + }, + registry: msgTypes + } +} +export default IgntModule; \ No newline at end of file diff --git a/ts-client/cosmos.mint.v1beta1/registry.ts b/ts-client/cosmos.mint.v1beta1/registry.ts new file mode 100755 index 000000000..7758b93e5 --- /dev/null +++ b/ts-client/cosmos.mint.v1beta1/registry.ts @@ -0,0 +1,29 @@ +import { GeneratedType } from "@cosmjs/proto-signing"; +import { QueryAnnualProvisionsResponse } from "./types/cosmos/mint/v1beta1/query"; +import { Minter } from "./types/cosmos/mint/v1beta1/mint"; +import { QueryInflationResponse } from "./types/cosmos/mint/v1beta1/query"; +import { QueryParamsRequest } from "./types/cosmos/mint/v1beta1/query"; +import { QueryParamsResponse } from "./types/cosmos/mint/v1beta1/query"; +import { QueryInflationRequest } from "./types/cosmos/mint/v1beta1/query"; +import { MsgUpdateParamsResponse } from "./types/cosmos/mint/v1beta1/tx"; +import { MsgUpdateParams } from "./types/cosmos/mint/v1beta1/tx"; +import { QueryAnnualProvisionsRequest } from "./types/cosmos/mint/v1beta1/query"; +import { GenesisState } from "./types/cosmos/mint/v1beta1/genesis"; +import { Params } from "./types/cosmos/mint/v1beta1/mint"; + +const msgTypes: Array<[string, GeneratedType]> = [ + ["/cosmos.mint.v1beta1.QueryAnnualProvisionsResponse", QueryAnnualProvisionsResponse], + ["/cosmos.mint.v1beta1.Minter", Minter], + ["/cosmos.mint.v1beta1.QueryInflationResponse", QueryInflationResponse], + ["/cosmos.mint.v1beta1.QueryParamsRequest", QueryParamsRequest], + ["/cosmos.mint.v1beta1.QueryParamsResponse", QueryParamsResponse], + ["/cosmos.mint.v1beta1.QueryInflationRequest", QueryInflationRequest], + ["/cosmos.mint.v1beta1.MsgUpdateParamsResponse", MsgUpdateParamsResponse], + ["/cosmos.mint.v1beta1.MsgUpdateParams", 
MsgUpdateParams], + ["/cosmos.mint.v1beta1.QueryAnnualProvisionsRequest", QueryAnnualProvisionsRequest], + ["/cosmos.mint.v1beta1.GenesisState", GenesisState], + ["/cosmos.mint.v1beta1.Params", Params], + +]; + +export { msgTypes } \ No newline at end of file diff --git a/ts-client/cosmos.mint.v1beta1/rest.ts b/ts-client/cosmos.mint.v1beta1/rest.ts new file mode 100644 index 000000000..e65efbf5d --- /dev/null +++ b/ts-client/cosmos.mint.v1beta1/rest.ts @@ -0,0 +1,247 @@ +/* eslint-disable */ +/* tslint:disable */ +/* + * --------------------------------------------------------------- + * ## THIS FILE WAS GENERATED VIA SWAGGER-TYPESCRIPT-API ## + * ## ## + * ## AUTHOR: acacode ## + * ## SOURCE: https://github.com/acacode/swagger-typescript-api ## + * --------------------------------------------------------------- + */ + +export interface Any { + "@type"?: string; +} + +export interface Status { + /** @format int32 */ + code?: number; + message?: string; + details?: { "@type"?: string }[]; +} + +export interface QueryAnnualProvisionsResponse { + /** @format byte */ + annual_provisions?: string; +} + +export interface QueryInflationResponse { + /** @format byte */ + inflation?: string; +} + +export interface QueryParamsResponse { + params?: { + mint_denom?: string; + inflation_rate_change?: string; + inflation_max?: string; + inflation_min?: string; + goal_bonded?: string; + blocks_per_year?: string; + }; +} + +export interface V1Beta1Params { + mint_denom?: string; + inflation_rate_change?: string; + inflation_max?: string; + inflation_min?: string; + goal_bonded?: string; + + /** @format uint64 */ + blocks_per_year?: string; +} + +export type MsgUpdateParamsResponse = object; + +export interface Params { + mint_denom?: string; + inflation_rate_change?: string; + inflation_max?: string; + inflation_min?: string; + goal_bonded?: string; + + /** @format uint64 */ + blocks_per_year?: string; +} + +import axios, { AxiosInstance, AxiosRequestConfig, AxiosResponse, 
ResponseType } from "axios"; + +export type QueryParamsType = Record; + +export interface FullRequestParams extends Omit { + /** set parameter to `true` for call `securityWorker` for this request */ + secure?: boolean; + /** request path */ + path: string; + /** content type of request body */ + type?: ContentType; + /** query params */ + query?: QueryParamsType; + /** format of response (i.e. response.json() -> format: "json") */ + format?: ResponseType; + /** request body */ + body?: unknown; +} + +export type RequestParams = Omit; + +export interface ApiConfig extends Omit { + securityWorker?: ( + securityData: SecurityDataType | null, + ) => Promise | AxiosRequestConfig | void; + secure?: boolean; + format?: ResponseType; +} + +export enum ContentType { + Json = "application/json", + FormData = "multipart/form-data", + UrlEncoded = "application/x-www-form-urlencoded", +} + +export class HttpClient { + public instance: AxiosInstance; + private securityData: SecurityDataType | null = null; + private securityWorker?: ApiConfig["securityWorker"]; + private secure?: boolean; + private format?: ResponseType; + + constructor({ securityWorker, secure, format, ...axiosConfig }: ApiConfig = {}) { + this.instance = axios.create({ ...axiosConfig, baseURL: axiosConfig.baseURL || "" }); + this.secure = secure; + this.format = format; + this.securityWorker = securityWorker; + } + + public setSecurityData = (data: SecurityDataType | null) => { + this.securityData = data; + }; + + private mergeRequestParams(params1: AxiosRequestConfig, params2?: AxiosRequestConfig): AxiosRequestConfig { + return { + ...this.instance.defaults, + ...params1, + ...(params2 || {}), + headers: { + ...(this.instance.defaults.headers || {}), + ...(params1.headers || {}), + ...((params2 && params2.headers) || {}), + }, + }; + } + + private createFormData(input: Record): FormData { + return Object.keys(input || {}).reduce((formData, key) => { + const property = input[key]; + formData.append( + key, + 
property instanceof Blob + ? property + : typeof property === "object" && property !== null + ? JSON.stringify(property) + : `${property}`, + ); + return formData; + }, new FormData()); + } + + public request = async ({ + secure, + path, + type, + query, + format, + body, + ...params + }: FullRequestParams): Promise> => { + const secureParams = + ((typeof secure === "boolean" ? secure : this.secure) && + this.securityWorker && + (await this.securityWorker(this.securityData))) || + {}; + const requestParams = this.mergeRequestParams(params, secureParams); + const responseFormat = (format && this.format) || void 0; + + if (type === ContentType.FormData && body && body !== null && typeof body === "object") { + requestParams.headers.common = { Accept: "*/*" }; + requestParams.headers.post = {}; + requestParams.headers.put = {}; + + body = this.createFormData(body as Record); + } + + return this.instance.request({ + ...requestParams, + headers: { + ...(type && type !== ContentType.FormData ? 
{ "Content-Type": type } : {}), + ...(requestParams.headers || {}), + }, + params: query, + responseType: responseFormat, + data: body, + url: path, + }); + }; +} + +/** + * @title HTTP API Console cosmos.mint.v1beta1 + */ +export class Api extends HttpClient { + /** + * No description + * + * @tags Query + * @name AnnualProvisions + * @request GET:/cosmos/mint/v1beta1/annual_provisions + */ + annualProvisions = (params: RequestParams = {}) => + this.request<{ annual_provisions?: string }, { code?: number; message?: string; details?: { "@type"?: string }[] }>( + { + path: `/cosmos/mint/v1beta1/annual_provisions`, + method: "GET", + ...params, + }, + ); + + /** + * No description + * + * @tags Query + * @name Inflation + * @request GET:/cosmos/mint/v1beta1/inflation + */ + inflation = (params: RequestParams = {}) => + this.request<{ inflation?: string }, { code?: number; message?: string; details?: { "@type"?: string }[] }>({ + path: `/cosmos/mint/v1beta1/inflation`, + method: "GET", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name Params + * @request GET:/cosmos/mint/v1beta1/params + */ + params = (params: RequestParams = {}) => + this.request< + { + params?: { + mint_denom?: string; + inflation_rate_change?: string; + inflation_max?: string; + inflation_min?: string; + goal_bonded?: string; + blocks_per_year?: string; + }; + }, + { code?: number; message?: string; details?: { "@type"?: string }[] } + >({ + path: `/cosmos/mint/v1beta1/params`, + method: "GET", + ...params, + }); +} diff --git a/ts-client/cosmos.mint.v1beta1/types.ts b/ts-client/cosmos.mint.v1beta1/types.ts new file mode 100755 index 000000000..fbb084a7e --- /dev/null +++ b/ts-client/cosmos.mint.v1beta1/types.ts @@ -0,0 +1,5 @@ + + +export { + + } \ No newline at end of file diff --git a/ts-client/cosmos.mint.v1beta1/types/amino/amino.ts b/ts-client/cosmos.mint.v1beta1/types/amino/amino.ts new file mode 100644 index 000000000..54af2e625 --- /dev/null +++ 
b/ts-client/cosmos.mint.v1beta1/types/amino/amino.ts @@ -0,0 +1,3 @@ +/* eslint-disable */ + +export const protobufPackage = "amino"; diff --git a/ts-client/cosmos.mint.v1beta1/types/cosmos/mint/v1beta1/genesis.ts b/ts-client/cosmos.mint.v1beta1/types/cosmos/mint/v1beta1/genesis.ts new file mode 100644 index 000000000..cc946bb3f --- /dev/null +++ b/ts-client/cosmos.mint.v1beta1/types/cosmos/mint/v1beta1/genesis.ts @@ -0,0 +1,108 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Minter, Params } from "./mint"; + +export const protobufPackage = "cosmos.mint.v1beta1"; + +/** GenesisState defines the mint module's genesis state. */ +export interface GenesisState { + /** minter is a space for holding current inflation information. */ + minter: + | Minter + | undefined; + /** params defines all the parameters of the module. */ + params: Params | undefined; +} + +function createBaseGenesisState(): GenesisState { + return { minter: undefined, params: undefined }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.minter !== undefined) { + Minter.encode(message.minter, writer.uint32(10).fork()).ldelim(); + } + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGenesisState(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.minter = Minter.decode(reader, reader.uint32()); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.params = Params.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): GenesisState { + return { + minter: isSet(object.minter) ? Minter.fromJSON(object.minter) : undefined, + params: isSet(object.params) ? Params.fromJSON(object.params) : undefined, + }; + }, + + toJSON(message: GenesisState): unknown { + const obj: any = {}; + if (message.minter !== undefined) { + obj.minter = Minter.toJSON(message.minter); + } + if (message.params !== undefined) { + obj.params = Params.toJSON(message.params); + } + return obj; + }, + + create, I>>(base?: I): GenesisState { + return GenesisState.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): GenesisState { + const message = createBaseGenesisState(); + message.minter = (object.minter !== undefined && object.minter !== null) + ? Minter.fromPartial(object.minter) + : undefined; + message.params = (object.params !== undefined && object.params !== null) + ? Params.fromPartial(object.params) + : undefined; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.mint.v1beta1/types/cosmos/mint/v1beta1/mint.ts b/ts-client/cosmos.mint.v1beta1/types/cosmos/mint/v1beta1/mint.ts new file mode 100644 index 000000000..53c3f381a --- /dev/null +++ b/ts-client/cosmos.mint.v1beta1/types/cosmos/mint/v1beta1/mint.ts @@ -0,0 +1,290 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos.mint.v1beta1"; + +/** Minter represents the minting state. */ +export interface Minter { + /** current annual inflation rate */ + inflation: string; + /** current annual expected provisions */ + annualProvisions: string; +} + +/** Params defines the parameters for the x/mint module. */ +export interface Params { + /** type of coin to mint */ + mintDenom: string; + /** maximum annual change in inflation rate */ + inflationRateChange: string; + /** maximum inflation rate */ + inflationMax: string; + /** minimum inflation rate */ + inflationMin: string; + /** goal of percent bonded atoms */ + goalBonded: string; + /** expected blocks per year */ + blocksPerYear: number; +} + +function createBaseMinter(): Minter { + return { inflation: "", annualProvisions: "" }; +} + +export const Minter = { + encode(message: Minter, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.inflation !== "") { + writer.uint32(10).string(message.inflation); + } + if (message.annualProvisions !== "") { + writer.uint32(18).string(message.annualProvisions); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Minter { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMinter(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.inflation = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.annualProvisions = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Minter { + return { + inflation: isSet(object.inflation) ? String(object.inflation) : "", + annualProvisions: isSet(object.annualProvisions) ? String(object.annualProvisions) : "", + }; + }, + + toJSON(message: Minter): unknown { + const obj: any = {}; + if (message.inflation !== "") { + obj.inflation = message.inflation; + } + if (message.annualProvisions !== "") { + obj.annualProvisions = message.annualProvisions; + } + return obj; + }, + + create, I>>(base?: I): Minter { + return Minter.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): Minter { + const message = createBaseMinter(); + message.inflation = object.inflation ?? ""; + message.annualProvisions = object.annualProvisions ?? 
""; + return message; + }, +}; + +function createBaseParams(): Params { + return { + mintDenom: "", + inflationRateChange: "", + inflationMax: "", + inflationMin: "", + goalBonded: "", + blocksPerYear: 0, + }; +} + +export const Params = { + encode(message: Params, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.mintDenom !== "") { + writer.uint32(10).string(message.mintDenom); + } + if (message.inflationRateChange !== "") { + writer.uint32(18).string(message.inflationRateChange); + } + if (message.inflationMax !== "") { + writer.uint32(26).string(message.inflationMax); + } + if (message.inflationMin !== "") { + writer.uint32(34).string(message.inflationMin); + } + if (message.goalBonded !== "") { + writer.uint32(42).string(message.goalBonded); + } + if (message.blocksPerYear !== 0) { + writer.uint32(48).uint64(message.blocksPerYear); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Params { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseParams(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.mintDenom = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.inflationRateChange = reader.string(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.inflationMax = reader.string(); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.inflationMin = reader.string(); + continue; + case 5: + if (tag !== 42) { + break; + } + + message.goalBonded = reader.string(); + continue; + case 6: + if (tag !== 48) { + break; + } + + message.blocksPerYear = longToNumber(reader.uint64() as Long); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Params { + return { + mintDenom: isSet(object.mintDenom) ? String(object.mintDenom) : "", + inflationRateChange: isSet(object.inflationRateChange) ? String(object.inflationRateChange) : "", + inflationMax: isSet(object.inflationMax) ? String(object.inflationMax) : "", + inflationMin: isSet(object.inflationMin) ? String(object.inflationMin) : "", + goalBonded: isSet(object.goalBonded) ? String(object.goalBonded) : "", + blocksPerYear: isSet(object.blocksPerYear) ? 
Number(object.blocksPerYear) : 0, + }; + }, + + toJSON(message: Params): unknown { + const obj: any = {}; + if (message.mintDenom !== "") { + obj.mintDenom = message.mintDenom; + } + if (message.inflationRateChange !== "") { + obj.inflationRateChange = message.inflationRateChange; + } + if (message.inflationMax !== "") { + obj.inflationMax = message.inflationMax; + } + if (message.inflationMin !== "") { + obj.inflationMin = message.inflationMin; + } + if (message.goalBonded !== "") { + obj.goalBonded = message.goalBonded; + } + if (message.blocksPerYear !== 0) { + obj.blocksPerYear = Math.round(message.blocksPerYear); + } + return obj; + }, + + create, I>>(base?: I): Params { + return Params.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): Params { + const message = createBaseParams(); + message.mintDenom = object.mintDenom ?? ""; + message.inflationRateChange = object.inflationRateChange ?? ""; + message.inflationMax = object.inflationMax ?? ""; + message.inflationMin = object.inflationMin ?? ""; + message.goalBonded = object.goalBonded ?? ""; + message.blocksPerYear = object.blocksPerYear ?? 0; + return message; + }, +}; + +declare const self: any | undefined; +declare const window: any | undefined; +declare const global: any | undefined; +const tsProtoGlobalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new tsProtoGlobalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.mint.v1beta1/types/cosmos/mint/v1beta1/query.ts b/ts-client/cosmos.mint.v1beta1/types/cosmos/mint/v1beta1/query.ts new file mode 100644 index 000000000..4653b6126 --- /dev/null +++ b/ts-client/cosmos.mint.v1beta1/types/cosmos/mint/v1beta1/query.ts @@ -0,0 +1,453 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Params } from "./mint"; + +export const protobufPackage = "cosmos.mint.v1beta1"; + +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ +export interface QueryParamsRequest { +} + +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ +export interface QueryParamsResponse { + /** params defines the parameters of the module. */ + params: Params | undefined; +} + +/** QueryInflationRequest is the request type for the Query/Inflation RPC method. */ +export interface QueryInflationRequest { +} + +/** + * QueryInflationResponse is the response type for the Query/Inflation RPC + * method. + */ +export interface QueryInflationResponse { + /** inflation is the current minting inflation value. */ + inflation: Uint8Array; +} + +/** + * QueryAnnualProvisionsRequest is the request type for the + * Query/AnnualProvisions RPC method. + */ +export interface QueryAnnualProvisionsRequest { +} + +/** + * QueryAnnualProvisionsResponse is the response type for the + * Query/AnnualProvisions RPC method. + */ +export interface QueryAnnualProvisionsResponse { + /** annual_provisions is the current minting annual provisions value. 
*/ + annualProvisions: Uint8Array; +} + +function createBaseQueryParamsRequest(): QueryParamsRequest { + return {}; +} + +export const QueryParamsRequest = { + encode(_: QueryParamsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(_: any): QueryParamsRequest { + return {}; + }, + + toJSON(_: QueryParamsRequest): unknown { + const obj: any = {}; + return obj; + }, + + create, I>>(base?: I): QueryParamsRequest { + return QueryParamsRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(_: I): QueryParamsRequest { + const message = createBaseQueryParamsRequest(); + return message; + }, +}; + +function createBaseQueryParamsResponse(): QueryParamsResponse { + return { params: undefined }; +} + +export const QueryParamsResponse = { + encode(message: QueryParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryParamsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.params = Params.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): QueryParamsResponse { + return { params: isSet(object.params) ? Params.fromJSON(object.params) : undefined }; + }, + + toJSON(message: QueryParamsResponse): unknown { + const obj: any = {}; + if (message.params !== undefined) { + obj.params = Params.toJSON(message.params); + } + return obj; + }, + + create, I>>(base?: I): QueryParamsResponse { + return QueryParamsResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): QueryParamsResponse { + const message = createBaseQueryParamsResponse(); + message.params = (object.params !== undefined && object.params !== null) + ? Params.fromPartial(object.params) + : undefined; + return message; + }, +}; + +function createBaseQueryInflationRequest(): QueryInflationRequest { + return {}; +} + +export const QueryInflationRequest = { + encode(_: QueryInflationRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryInflationRequest { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryInflationRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(_: any): QueryInflationRequest { + return {}; + }, + + toJSON(_: QueryInflationRequest): unknown { + const obj: any = {}; + return obj; + }, + + create, I>>(base?: I): QueryInflationRequest { + return QueryInflationRequest.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(_: I): QueryInflationRequest { + const message = createBaseQueryInflationRequest(); + return message; + }, +}; + +function createBaseQueryInflationResponse(): QueryInflationResponse { + return { inflation: new Uint8Array(0) }; +} + +export const QueryInflationResponse = { + encode(message: QueryInflationResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.inflation.length !== 0) { + writer.uint32(10).bytes(message.inflation); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryInflationResponse { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryInflationResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.inflation = reader.bytes(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): QueryInflationResponse { + return { inflation: isSet(object.inflation) ? 
bytesFromBase64(object.inflation) : new Uint8Array(0) }; + }, + + toJSON(message: QueryInflationResponse): unknown { + const obj: any = {}; + if (message.inflation.length !== 0) { + obj.inflation = base64FromBytes(message.inflation); + } + return obj; + }, + + create, I>>(base?: I): QueryInflationResponse { + return QueryInflationResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): QueryInflationResponse { + const message = createBaseQueryInflationResponse(); + message.inflation = object.inflation ?? new Uint8Array(0); + return message; + }, +}; + +function createBaseQueryAnnualProvisionsRequest(): QueryAnnualProvisionsRequest { + return {}; +} + +export const QueryAnnualProvisionsRequest = { + encode(_: QueryAnnualProvisionsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAnnualProvisionsRequest { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAnnualProvisionsRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(_: any): QueryAnnualProvisionsRequest { + return {}; + }, + + toJSON(_: QueryAnnualProvisionsRequest): unknown { + const obj: any = {}; + return obj; + }, + + create, I>>(base?: I): QueryAnnualProvisionsRequest { + return QueryAnnualProvisionsRequest.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(_: I): QueryAnnualProvisionsRequest { + const message = createBaseQueryAnnualProvisionsRequest(); + return message; + }, +}; + +function createBaseQueryAnnualProvisionsResponse(): QueryAnnualProvisionsResponse { + return { annualProvisions: new Uint8Array(0) }; +} + +export const QueryAnnualProvisionsResponse = { + encode(message: QueryAnnualProvisionsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.annualProvisions.length !== 0) { + writer.uint32(10).bytes(message.annualProvisions); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAnnualProvisionsResponse { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAnnualProvisionsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.annualProvisions = reader.bytes(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): QueryAnnualProvisionsResponse { + return { + annualProvisions: isSet(object.annualProvisions) ? bytesFromBase64(object.annualProvisions) : new Uint8Array(0), + }; + }, + + toJSON(message: QueryAnnualProvisionsResponse): unknown { + const obj: any = {}; + if (message.annualProvisions.length !== 0) { + obj.annualProvisions = base64FromBytes(message.annualProvisions); + } + return obj; + }, + + create, I>>(base?: I): QueryAnnualProvisionsResponse { + return QueryAnnualProvisionsResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): QueryAnnualProvisionsResponse { + const message = createBaseQueryAnnualProvisionsResponse(); + message.annualProvisions = object.annualProvisions ?? 
new Uint8Array(0); + return message; + }, +}; + +/** Query provides defines the gRPC querier service. */ +export interface Query { + /** Params returns the total set of minting parameters. */ + Params(request: QueryParamsRequest): Promise; + /** Inflation returns the current minting inflation value. */ + Inflation(request: QueryInflationRequest): Promise; + /** AnnualProvisions current minting annual provisions value. */ + AnnualProvisions(request: QueryAnnualProvisionsRequest): Promise; +} + +export const QueryServiceName = "cosmos.mint.v1beta1.Query"; +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + private readonly service: string; + constructor(rpc: Rpc, opts?: { service?: string }) { + this.service = opts?.service || QueryServiceName; + this.rpc = rpc; + this.Params = this.Params.bind(this); + this.Inflation = this.Inflation.bind(this); + this.AnnualProvisions = this.AnnualProvisions.bind(this); + } + Params(request: QueryParamsRequest): Promise { + const data = QueryParamsRequest.encode(request).finish(); + const promise = this.rpc.request(this.service, "Params", data); + return promise.then((data) => QueryParamsResponse.decode(_m0.Reader.create(data))); + } + + Inflation(request: QueryInflationRequest): Promise { + const data = QueryInflationRequest.encode(request).finish(); + const promise = this.rpc.request(this.service, "Inflation", data); + return promise.then((data) => QueryInflationResponse.decode(_m0.Reader.create(data))); + } + + AnnualProvisions(request: QueryAnnualProvisionsRequest): Promise { + const data = QueryAnnualProvisionsRequest.encode(request).finish(); + const promise = this.rpc.request(this.service, "AnnualProvisions", data); + return promise.then((data) => QueryAnnualProvisionsResponse.decode(_m0.Reader.create(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +declare const self: any | undefined; +declare const window: any | undefined; +declare 
const global: any | undefined; +const tsProtoGlobalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.mint.v1beta1/types/cosmos/mint/v1beta1/tx.ts b/ts-client/cosmos.mint.v1beta1/types/cosmos/mint/v1beta1/tx.ts new file mode 100644 index 000000000..8bb703edd --- /dev/null +++ b/ts-client/cosmos.mint.v1beta1/types/cosmos/mint/v1beta1/tx.ts @@ -0,0 +1,195 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Params } from "./mint"; + +export const protobufPackage = "cosmos.mint.v1beta1"; + +/** + * MsgUpdateParams is the Msg/UpdateParams request type. + * + * Since: cosmos-sdk 0.47 + */ +export interface MsgUpdateParams { + /** authority is the address that controls the module (defaults to x/gov unless overwritten). */ + authority: string; + /** + * params defines the x/mint parameters to update. + * + * NOTE: All parameters must be supplied. + */ + params: Params | undefined; +} + +/** + * MsgUpdateParamsResponse defines the response structure for executing a + * MsgUpdateParams message. + * + * Since: cosmos-sdk 0.47 + */ +export interface MsgUpdateParamsResponse { +} + +function createBaseMsgUpdateParams(): MsgUpdateParams { + return { authority: "", params: undefined }; +} + +export const MsgUpdateParams = { + encode(message: MsgUpdateParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateParams { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgUpdateParams(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.authority = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.params = Params.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): MsgUpdateParams { + return { + authority: isSet(object.authority) ? String(object.authority) : "", + params: isSet(object.params) ? Params.fromJSON(object.params) : undefined, + }; + }, + + toJSON(message: MsgUpdateParams): unknown { + const obj: any = {}; + if (message.authority !== "") { + obj.authority = message.authority; + } + if (message.params !== undefined) { + obj.params = Params.toJSON(message.params); + } + return obj; + }, + + create, I>>(base?: I): MsgUpdateParams { + return MsgUpdateParams.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): MsgUpdateParams { + const message = createBaseMsgUpdateParams(); + message.authority = object.authority ?? ""; + message.params = (object.params !== undefined && object.params !== null) + ? Params.fromPartial(object.params) + : undefined; + return message; + }, +}; + +function createBaseMsgUpdateParamsResponse(): MsgUpdateParamsResponse { + return {}; +} + +export const MsgUpdateParamsResponse = { + encode(_: MsgUpdateParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateParamsResponse { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgUpdateParamsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(_: any): MsgUpdateParamsResponse { + return {}; + }, + + toJSON(_: MsgUpdateParamsResponse): unknown { + const obj: any = {}; + return obj; + }, + + create, I>>(base?: I): MsgUpdateParamsResponse { + return MsgUpdateParamsResponse.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(_: I): MsgUpdateParamsResponse { + const message = createBaseMsgUpdateParamsResponse(); + return message; + }, +}; + +/** Msg defines the x/mint Msg service. */ +export interface Msg { + /** + * UpdateParams defines a governance operation for updating the x/mint module + * parameters. The authority is defaults to the x/gov module account. + * + * Since: cosmos-sdk 0.47 + */ + UpdateParams(request: MsgUpdateParams): Promise; +} + +export const MsgServiceName = "cosmos.mint.v1beta1.Msg"; +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + private readonly service: string; + constructor(rpc: Rpc, opts?: { service?: string }) { + this.service = opts?.service || MsgServiceName; + this.rpc = rpc; + this.UpdateParams = this.UpdateParams.bind(this); + } + UpdateParams(request: MsgUpdateParams): Promise { + const data = MsgUpdateParams.encode(request).finish(); + const promise = this.rpc.request(this.service, "UpdateParams", data); + return promise.then((data) => MsgUpdateParamsResponse.decode(_m0.Reader.create(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? 
{ [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.mint.v1beta1/types/cosmos/msg/v1/msg.ts b/ts-client/cosmos.mint.v1beta1/types/cosmos/msg/v1/msg.ts new file mode 100644 index 000000000..093dfbba2 --- /dev/null +++ b/ts-client/cosmos.mint.v1beta1/types/cosmos/msg/v1/msg.ts @@ -0,0 +1,3 @@ +/* eslint-disable */ + +export const protobufPackage = "cosmos.msg.v1"; diff --git a/ts-client/cosmos.mint.v1beta1/types/cosmos_proto/cosmos.ts b/ts-client/cosmos.mint.v1beta1/types/cosmos_proto/cosmos.ts new file mode 100644 index 000000000..8f5aa393d --- /dev/null +++ b/ts-client/cosmos.mint.v1beta1/types/cosmos_proto/cosmos.ts @@ -0,0 +1,284 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos_proto"; + +export enum ScalarType { + SCALAR_TYPE_UNSPECIFIED = 0, + SCALAR_TYPE_STRING = 1, + SCALAR_TYPE_BYTES = 2, + UNRECOGNIZED = -1, +} + +export function scalarTypeFromJSON(object: any): ScalarType { + switch (object) { + case 0: + case "SCALAR_TYPE_UNSPECIFIED": + return ScalarType.SCALAR_TYPE_UNSPECIFIED; + case 1: + case "SCALAR_TYPE_STRING": + return ScalarType.SCALAR_TYPE_STRING; + case 2: + case "SCALAR_TYPE_BYTES": + return ScalarType.SCALAR_TYPE_BYTES; + case -1: + case "UNRECOGNIZED": + default: + return ScalarType.UNRECOGNIZED; + } +} + +export function scalarTypeToJSON(object: ScalarType): string { + switch (object) { + case ScalarType.SCALAR_TYPE_UNSPECIFIED: + return "SCALAR_TYPE_UNSPECIFIED"; + case ScalarType.SCALAR_TYPE_STRING: + return "SCALAR_TYPE_STRING"; + case ScalarType.SCALAR_TYPE_BYTES: + return "SCALAR_TYPE_BYTES"; + case ScalarType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * InterfaceDescriptor describes an 
interface type to be used with + * accepts_interface and implements_interface and declared by declare_interface. + */ +export interface InterfaceDescriptor { + /** + * name is the name of the interface. It should be a short-name (without + * a period) such that the fully qualified name of the interface will be + * package.name, ex. for the package a.b and interface named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the interface and its + * purpose. + */ + description: string; +} + +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. + * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. + * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ +export interface ScalarDescriptor { + /** + * name is the name of the scalar. It should be a short-name (without + * a period) such that the fully qualified name of the scalar will be + * package.name, ex. for the package a.b and scalar named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the scalar and its + * encoding format. For instance a big integer or decimal scalar should + * specify precisely the expected encoding format. + */ + description: string; + /** + * field_type is the type of field with which this scalar can be used. + * Scalars can be used with one and only one type of field so that + * encoding standards and simple and clear. Currently only string and + * bytes fields are supported for scalars. 
+ */ + fieldType: ScalarType[]; +} + +function createBaseInterfaceDescriptor(): InterfaceDescriptor { + return { name: "", description: "" }; +} + +export const InterfaceDescriptor = { + encode(message: InterfaceDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceDescriptor { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseInterfaceDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.description = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): InterfaceDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + }; + }, + + toJSON(message: InterfaceDescriptor): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.description !== "") { + obj.description = message.description; + } + return obj; + }, + + create, I>>(base?: I): InterfaceDescriptor { + return InterfaceDescriptor.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): InterfaceDescriptor { + const message = createBaseInterfaceDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? 
""; + return message; + }, +}; + +function createBaseScalarDescriptor(): ScalarDescriptor { + return { name: "", description: "", fieldType: [] }; +} + +export const ScalarDescriptor = { + encode(message: ScalarDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + writer.uint32(26).fork(); + for (const v of message.fieldType) { + writer.int32(v); + } + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ScalarDescriptor { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseScalarDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.description = reader.string(); + continue; + case 3: + if (tag === 24) { + message.fieldType.push(reader.int32() as any); + + continue; + } + + if (tag === 26) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.fieldType.push(reader.int32() as any); + } + + continue; + } + + break; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): ScalarDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + fieldType: Array.isArray(object?.fieldType) ? 
object.fieldType.map((e: any) => scalarTypeFromJSON(e)) : [], + }; + }, + + toJSON(message: ScalarDescriptor): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.description !== "") { + obj.description = message.description; + } + if (message.fieldType?.length) { + obj.fieldType = message.fieldType.map((e) => scalarTypeToJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): ScalarDescriptor { + return ScalarDescriptor.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): ScalarDescriptor { + const message = createBaseScalarDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? ""; + message.fieldType = object.fieldType?.map((e) => e) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.mint.v1beta1/types/gogoproto/gogo.ts b/ts-client/cosmos.mint.v1beta1/types/gogoproto/gogo.ts new file mode 100644 index 000000000..ecf800e03 --- /dev/null +++ b/ts-client/cosmos.mint.v1beta1/types/gogoproto/gogo.ts @@ -0,0 +1,3 @@ +/* eslint-disable */ + +export const protobufPackage = "gogoproto"; diff --git a/ts-client/cosmos.mint.v1beta1/types/google/api/annotations.ts b/ts-client/cosmos.mint.v1beta1/types/google/api/annotations.ts new file mode 100644 index 000000000..c2161053d --- /dev/null +++ b/ts-client/cosmos.mint.v1beta1/types/google/api/annotations.ts @@ -0,0 +1,3 @@ +/* eslint-disable */ + +export const protobufPackage = "google.api"; diff --git a/ts-client/cosmos.mint.v1beta1/types/google/api/http.ts b/ts-client/cosmos.mint.v1beta1/types/google/api/http.ts new file mode 100644 index 000000000..9010600ba --- /dev/null +++ b/ts-client/cosmos.mint.v1beta1/types/google/api/http.ts @@ -0,0 +1,675 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.api"; + +/** + * Defines the HTTP configuration for an API service. It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface Http { + /** + * A list of HTTP configuration rules that apply to individual API methods. + * + * **NOTE:** All service configuration rules follow "last one wins" order. + */ + rules: HttpRule[]; + /** + * When set to true, URL path parmeters will be fully URI-decoded except in + * cases of single segment matches in reserved expansion, where "%2F" will be + * left encoded. + * + * The default behavior is to not decode RFC 6570 reserved characters in multi + * segment matches. 
+ */ + fullyDecodeReservedExpansion: boolean; +} + +/** + * `HttpRule` defines the mapping of an RPC method to one or more HTTP + * REST API methods. The mapping specifies how different portions of the RPC + * request message are mapped to URL path, URL query parameters, and + * HTTP request body. The mapping is typically specified as an + * `google.api.http` annotation on the RPC method, + * see "google/api/annotations.proto" for details. + * + * The mapping consists of a field specifying the path template and + * method kind. The path template can refer to fields in the request + * message, as in the example below which describes a REST GET + * operation on a resource collection of messages: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}/{sub.subfield}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * SubMessage sub = 2; // `sub.subfield` is url-mapped + * } + * message Message { + * string text = 1; // content of the resource + * } + * + * The same http annotation can alternatively be expressed inside the + * `GRPC API Configuration` YAML file. + * + * http: + * rules: + * - selector: .Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * This definition enables an automatic, bidrectional mapping of HTTP + * JSON to RPC. Example: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456/foo` | `GetMessage(message_id: "123456" sub: SubMessage(subfield: "foo"))` + * + * In general, not only fields but also field paths can be referenced + * from a path pattern. Fields mapped to the path pattern cannot be + * repeated and must have a primitive (non-message) type. + * + * Any fields in the request message which are not bound by the path + * pattern automatically become (optional) HTTP query + * parameters. 
Assume the following definition of the request message: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * int64 revision = 2; // becomes a parameter + * SubMessage sub = 3; // `sub.subfield` becomes a parameter + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456?revision=2&sub.subfield=foo` | `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: "foo"))` + * + * Note that fields which are mapped to HTTP parameters must have a + * primitive type or a repeated primitive type. Message types are not + * allowed. In the case of a repeated type, the parameter can be + * repeated in the URL, as in `...?param=A¶m=B`. + * + * For HTTP method kinds which allow a request body, the `body` field + * specifies the mapping. Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. 
This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * The following HTTP JSON to RPC mapping is enabled: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice of + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC + * mappings: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` + * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: "123456")` + * + * # Rules for HTTP mapping + * + * The rules for mapping HTTP path, query parameters, and body fields + * to the request message are as follows: + * + * 1. The `body` field specifies either `*` or a field path, or is + * omitted. If omitted, it indicates there is no HTTP request body. + * 2. 
Leaf fields (recursive expansion of nested messages in the + * request) can be classified into three types: + * (a) Matched in the URL template. + * (b) Covered by body (if body is `*`, everything except (a) fields; + * else everything under the body field) + * (c) All other fields. + * 3. URL query parameters found in the HTTP request are mapped to (c) fields. + * 4. Any body sent with an HTTP request can contain only (b) fields. + * + * The syntax of the path template is as follows: + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single path segment. The syntax `**` matches zero + * or more path segments, which must be the last part of the path except the + * `Verb`. The syntax `LITERAL` matches literal text in the path. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path, all characters + * except `[-_.~0-9a-zA-Z]` are percent-encoded. Such variables show up in the + * Discovery Document as `{var}`. + * + * If a variable contains one or more path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path, all + * characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. Such variables + * show up in the Discovery Document as `{+var}`. 
+ * + * NOTE: While the single segment variable matches the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 + * Simple String Expansion, the multi segment variable **does not** match + * RFC 6570 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. + * + * NOTE: the field paths in variables and in the `body` must not refer to + * repeated fields or map fields. + */ +export interface HttpRule { + /** + * Selects methods to which this rule applies. + * + * Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + */ + selector: string; + /** Used for listing and getting information about resources. */ + get?: + | string + | undefined; + /** Used for updating a resource. */ + put?: + | string + | undefined; + /** Used for creating a resource. */ + post?: + | string + | undefined; + /** Used for deleting a resource. */ + delete?: + | string + | undefined; + /** Used for updating a resource. */ + patch?: + | string + | undefined; + /** + * The custom pattern is used for specifying an HTTP method that is not + * included in the `pattern` field, such as HEAD, or "*" to leave the + * HTTP method unspecified for this rule. The wild-card rule is useful + * for services that provide content to Web (HTML) clients. + */ + custom?: + | CustomHttpPattern + | undefined; + /** + * The name of the request field whose value is mapped to the HTTP body, or + * `*` for mapping all fields not captured by the path pattern to the HTTP + * body. NOTE: the referred field must not be a repeated field and must be + * present at the top-level of request message type. + */ + body: string; + /** + * Optional. The name of the response field whose value is mapped to the HTTP + * body of response. Other response fields are ignored. When + * not set, the response message will be used as HTTP body of response. 
+ */ + responseBody: string; + /** + * Additional HTTP bindings for the selector. Nested bindings must + * not contain an `additional_bindings` field themselves (that is, + * the nesting may only be one level deep). + */ + additionalBindings: HttpRule[]; +} + +/** A custom pattern is used for defining custom HTTP verb. */ +export interface CustomHttpPattern { + /** The name of this custom HTTP verb. */ + kind: string; + /** The path matched by this custom verb. */ + path: string; +} + +function createBaseHttp(): Http { + return { rules: [], fullyDecodeReservedExpansion: false }; +} + +export const Http = { + encode(message: Http, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rules) { + HttpRule.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.fullyDecodeReservedExpansion === true) { + writer.uint32(16).bool(message.fullyDecodeReservedExpansion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Http { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.rules.push(HttpRule.decode(reader, reader.uint32())); + continue; + case 2: + if (tag !== 16) { + break; + } + + message.fullyDecodeReservedExpansion = reader.bool(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): Http { + return { + rules: Array.isArray(object?.rules) ? object.rules.map((e: any) => HttpRule.fromJSON(e)) : [], + fullyDecodeReservedExpansion: isSet(object.fullyDecodeReservedExpansion) + ? 
Boolean(object.fullyDecodeReservedExpansion) + : false, + }; + }, + + toJSON(message: Http): unknown { + const obj: any = {}; + if (message.rules?.length) { + obj.rules = message.rules.map((e) => HttpRule.toJSON(e)); + } + if (message.fullyDecodeReservedExpansion === true) { + obj.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion; + } + return obj; + }, + + create, I>>(base?: I): Http { + return Http.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): Http { + const message = createBaseHttp(); + message.rules = object.rules?.map((e) => HttpRule.fromPartial(e)) || []; + message.fullyDecodeReservedExpansion = object.fullyDecodeReservedExpansion ?? false; + return message; + }, +}; + +function createBaseHttpRule(): HttpRule { + return { + selector: "", + get: undefined, + put: undefined, + post: undefined, + delete: undefined, + patch: undefined, + custom: undefined, + body: "", + responseBody: "", + additionalBindings: [], + }; +} + +export const HttpRule = { + encode(message: HttpRule, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.selector !== "") { + writer.uint32(10).string(message.selector); + } + if (message.get !== undefined) { + writer.uint32(18).string(message.get); + } + if (message.put !== undefined) { + writer.uint32(26).string(message.put); + } + if (message.post !== undefined) { + writer.uint32(34).string(message.post); + } + if (message.delete !== undefined) { + writer.uint32(42).string(message.delete); + } + if (message.patch !== undefined) { + writer.uint32(50).string(message.patch); + } + if (message.custom !== undefined) { + CustomHttpPattern.encode(message.custom, writer.uint32(66).fork()).ldelim(); + } + if (message.body !== "") { + writer.uint32(58).string(message.body); + } + if (message.responseBody !== "") { + writer.uint32(98).string(message.responseBody); + } + for (const v of message.additionalBindings) { + HttpRule.encode(v!, writer.uint32(90).fork()).ldelim(); + } + return 
writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): HttpRule { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttpRule(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.selector = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.get = reader.string(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.put = reader.string(); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.post = reader.string(); + continue; + case 5: + if (tag !== 42) { + break; + } + + message.delete = reader.string(); + continue; + case 6: + if (tag !== 50) { + break; + } + + message.patch = reader.string(); + continue; + case 8: + if (tag !== 66) { + break; + } + + message.custom = CustomHttpPattern.decode(reader, reader.uint32()); + continue; + case 7: + if (tag !== 58) { + break; + } + + message.body = reader.string(); + continue; + case 12: + if (tag !== 98) { + break; + } + + message.responseBody = reader.string(); + continue; + case 11: + if (tag !== 90) { + break; + } + + message.additionalBindings.push(HttpRule.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): HttpRule { + return { + selector: isSet(object.selector) ? String(object.selector) : "", + get: isSet(object.get) ? String(object.get) : undefined, + put: isSet(object.put) ? String(object.put) : undefined, + post: isSet(object.post) ? String(object.post) : undefined, + delete: isSet(object.delete) ? String(object.delete) : undefined, + patch: isSet(object.patch) ? String(object.patch) : undefined, + custom: isSet(object.custom) ? 
CustomHttpPattern.fromJSON(object.custom) : undefined, + body: isSet(object.body) ? String(object.body) : "", + responseBody: isSet(object.responseBody) ? String(object.responseBody) : "", + additionalBindings: Array.isArray(object?.additionalBindings) + ? object.additionalBindings.map((e: any) => HttpRule.fromJSON(e)) + : [], + }; + }, + + toJSON(message: HttpRule): unknown { + const obj: any = {}; + if (message.selector !== "") { + obj.selector = message.selector; + } + if (message.get !== undefined) { + obj.get = message.get; + } + if (message.put !== undefined) { + obj.put = message.put; + } + if (message.post !== undefined) { + obj.post = message.post; + } + if (message.delete !== undefined) { + obj.delete = message.delete; + } + if (message.patch !== undefined) { + obj.patch = message.patch; + } + if (message.custom !== undefined) { + obj.custom = CustomHttpPattern.toJSON(message.custom); + } + if (message.body !== "") { + obj.body = message.body; + } + if (message.responseBody !== "") { + obj.responseBody = message.responseBody; + } + if (message.additionalBindings?.length) { + obj.additionalBindings = message.additionalBindings.map((e) => HttpRule.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): HttpRule { + return HttpRule.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): HttpRule { + const message = createBaseHttpRule(); + message.selector = object.selector ?? ""; + message.get = object.get ?? undefined; + message.put = object.put ?? undefined; + message.post = object.post ?? undefined; + message.delete = object.delete ?? undefined; + message.patch = object.patch ?? undefined; + message.custom = (object.custom !== undefined && object.custom !== null) + ? CustomHttpPattern.fromPartial(object.custom) + : undefined; + message.body = object.body ?? ""; + message.responseBody = object.responseBody ?? 
""; + message.additionalBindings = object.additionalBindings?.map((e) => HttpRule.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseCustomHttpPattern(): CustomHttpPattern { + return { kind: "", path: "" }; +} + +export const CustomHttpPattern = { + encode(message: CustomHttpPattern, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.kind !== "") { + writer.uint32(10).string(message.kind); + } + if (message.path !== "") { + writer.uint32(18).string(message.path); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CustomHttpPattern { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCustomHttpPattern(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.kind = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.path = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): CustomHttpPattern { + return { kind: isSet(object.kind) ? String(object.kind) : "", path: isSet(object.path) ? String(object.path) : "" }; + }, + + toJSON(message: CustomHttpPattern): unknown { + const obj: any = {}; + if (message.kind !== "") { + obj.kind = message.kind; + } + if (message.path !== "") { + obj.path = message.path; + } + return obj; + }, + + create, I>>(base?: I): CustomHttpPattern { + return CustomHttpPattern.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): CustomHttpPattern { + const message = createBaseCustomHttpPattern(); + message.kind = object.kind ?? ""; + message.path = object.path ?? 
""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.mint.v1beta1/types/google/protobuf/descriptor.ts b/ts-client/cosmos.mint.v1beta1/types/google/protobuf/descriptor.ts new file mode 100644 index 000000000..0ea8d82d1 --- /dev/null +++ b/ts-client/cosmos.mint.v1beta1/types/google/protobuf/descriptor.ts @@ -0,0 +1,4470 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSet { + file: FileDescriptorProto[]; +} + +/** Describes a complete .proto file. */ +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name: string; + /** e.g. "foo", "foo.bar", etc. */ + package: string; + /** Names of files imported by this file. */ + dependency: string[]; + /** Indexes of the public imported files in the dependency list above. */ + publicDependency: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weakDependency: number[]; + /** All top-level definitions in this file. 
*/ + messageType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + service: ServiceDescriptorProto[]; + extension: FieldDescriptorProto[]; + options: + | FileOptions + | undefined; + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. + */ + sourceCodeInfo: + | SourceCodeInfo + | undefined; + /** + * The syntax of the proto file. + * The supported values are "proto2" and "proto3". + */ + syntax: string; +} + +/** Describes a message type. */ +export interface DescriptorProto { + name: string; + field: FieldDescriptorProto[]; + extension: FieldDescriptorProto[]; + nestedType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + extensionRange: DescriptorProto_ExtensionRange[]; + oneofDecl: OneofDescriptorProto[]; + options: MessageOptions | undefined; + reservedRange: DescriptorProto_ReservedRange[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reservedName: string[]; +} + +export interface DescriptorProto_ExtensionRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; + options: ExtensionRangeOptions | undefined; +} + +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; +} + +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Describes a field within a message. 
*/ +export interface FieldDescriptorProto { + name: string; + number: number; + label: FieldDescriptorProto_Label; + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type: FieldDescriptorProto_Type; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). + */ + typeName: string; + /** + * For extensions, this is the name of the type being extended. It is + * resolved in the same manner as type_name. + */ + extendee: string; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + * TODO(kenton): Base-64 encode? + */ + defaultValue: string; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneofIndex: number; + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + jsonName: string; + options: + | FieldOptions + | undefined; + /** + * If true, this is a proto3 "optional". When a proto3 field is optional, it + * tracks presence regardless of field type. + * + * When proto3_optional is true, this field must be belong to a oneof to + * signal to old proto3 clients that presence is tracked for this field. 
This + * oneof is known as a "synthetic" oneof, and this field must be its sole + * member (each proto3 optional field gets its own synthetic oneof). Synthetic + * oneofs exist in the descriptor only, and do not generate any API. Synthetic + * oneofs must be ordered after all "real" oneofs. + * + * For message fields, proto3_optional doesn't create any semantic change, + * since non-repeated message fields always track presence. However it still + * indicates the semantic detail of whether the user wrote "optional" or not. + * This can be useful for round-tripping the .proto file. For consistency we + * give message fields a synthetic oneof also, even though it is not required + * to track presence. This is especially important because the parser can't + * tell if a field is a message or an enum, so it must always create a + * synthetic oneof. + * + * Proto2 optional fields do not set this flag, because they already indicate + * optional with `LABEL_OPTIONAL`. + */ + proto3Optional: boolean; +} + +export enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + TYPE_GROUP = 10, + /** TYPE_MESSAGE - Length-delimited aggregate. */ + TYPE_MESSAGE = 11, + /** TYPE_BYTES - New in version 2. 
*/ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + /** TYPE_SINT64 - Uses ZigZag encoding. */ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + return FieldDescriptorProto_Type.TYPE_SINT64; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Type.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_TypeToJSON(object: 
FieldDescriptorProto_Type): string { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + case FieldDescriptorProto_Type.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + case -1: + case "UNRECOGNIZED": 
+ default: + return FieldDescriptorProto_Label.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string { + switch (object) { + case FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + case FieldDescriptorProto_Label.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** Describes a oneof. */ +export interface OneofDescriptorProto { + name: string; + options: OneofOptions | undefined; +} + +/** Describes an enum type. */ +export interface EnumDescriptorProto { + name: string; + value: EnumValueDescriptorProto[]; + options: + | EnumOptions + | undefined; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reservedRange: EnumDescriptorProto_EnumReservedRange[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reservedName: string[]; +} + +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start: number; + /** Inclusive. */ + end: number; +} + +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProto { + name: string; + number: number; + options: EnumValueOptions | undefined; +} + +/** Describes a service. */ +export interface ServiceDescriptorProto { + name: string; + method: MethodDescriptorProto[]; + options: ServiceOptions | undefined; +} + +/** Describes a method of a service. 
*/ +export interface MethodDescriptorProto { + name: string; + /** + * Input and output type names. These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + inputType: string; + outputType: string; + options: + | MethodOptions + | undefined; + /** Identifies if client streams multiple client messages */ + clientStreaming: boolean; + /** Identifies if server streams multiple server messages */ + serverStreaming: boolean; +} + +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage: string; + /** + * Controls the name of the wrapper Java class generated for the .proto file. + * That class will always contain the .proto file's getDescriptor() method as + * well as any top-level extensions defined in the .proto file. + * If java_multiple_files is disabled, then all the other classes from the + * .proto file will be nested inside the single wrapper outer class. + */ + javaOuterClassname: string; + /** + * If enabled, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the wrapper class + * named by java_outer_classname. However, the wrapper class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + javaMultipleFiles: boolean; + /** + * This option does nothing. + * + * @deprecated + */ + javaGenerateEqualsAndHash: boolean; + /** + * If set true, then the Java2 code generator will generate code that + * throws an exception whenever an attempt is made to assign a non-UTF-8 + * byte sequence to a string field. + * Message reflection will do the same. 
+ * However, an extension field still accepts non-UTF-8 byte sequences. + * This option has no effect on when used with the lite runtime. + */ + javaStringCheckUtf8: boolean; + optimizeFor: FileOptions_OptimizeMode; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + goPackage: string; + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + ccGenericServices: boolean; + javaGenericServices: boolean; + pyGenericServices: boolean; + phpGenericServices: boolean; + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated: boolean; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + ccEnableArenas: boolean; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. 
+ */ + objcClassPrefix: string; + /** Namespace for generated classes; defaults to the package. */ + csharpNamespace: string; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swiftPrefix: string; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + phpClassPrefix: string; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. When this option is empty, the package name will be used for + * determining the namespace. + */ + phpNamespace: string; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + phpMetadataNamespace: string; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + rubyPackage: string; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Generated classes can be optimized for speed or code size. */ +export enum FileOptions_OptimizeMode { + /** SPEED - Generate complete code for parsing, serialization, */ + SPEED = 1, + /** CODE_SIZE - etc. */ + CODE_SIZE = 2, + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. 
*/ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1, +} + +export function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + case -1: + case "UNRECOGNIZED": + default: + return FileOptions_OptimizeMode.UNRECOGNIZED; + } +} + +export function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + case FileOptions_OptimizeMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + messageSetWireFormat: boolean; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". 
+ */ + noStandardDescriptorAccessor: boolean; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated: boolean; + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + mapEntry: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface FieldOptions { + /** + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is not yet implemented in the open source + * release -- sorry, we'll try to include it in a future version! + */ + ctype: FieldOptions_CType; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. 
+ */ + packed: boolean; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. + * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype: FieldOptions_JSType; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * Note that implementations may choose not to check required fields within + * a lazy sub-message. That is, calling IsInitialized() on the outer message + * may return true even if the inner message has missing required fields. 
+ * This is necessary because otherwise the inner message would have to be + * parsed in order to perform the check, defeating the purpose of lazy + * parsing. An implementation which chooses not to check required fields + * must be consistent about it. That is, for any particular sub-message, the + * implementation must either *always* check its required fields, or *never* + * check its required fields, regardless of whether or not the message has + * been parsed. + */ + lazy: boolean; + /** + * Is this field deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated: boolean; + /** For Google-internal migration only. Do not use. */ + weak: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export enum FieldOptions_CType { + /** STRING - Default mode. */ + STRING = 0, + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_CType.UNRECOGNIZED; + } +} + +export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + case FieldOptions_CType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. */ + JS_NORMAL = 0, + /** JS_STRING - Use JavaScript strings. 
*/ + JS_STRING = 1, + /** JS_NUMBER - Use JavaScript numbers. */ + JS_NUMBER = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_JSType.UNRECOGNIZED; + } +} + +export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + case FieldOptions_JSType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface OneofOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias: boolean; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumValueOptions { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. 
*/ + uninterpretedOption: UninterpretedOption[]; +} + +export interface ServiceOptions { + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated: boolean; + idempotencyLevel: MethodOptions_IdempotencyLevel; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
+ */ +export enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1, +} + +export function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + case -1: + case "UNRECOGNIZED": + default: + return MethodOptions_IdempotencyLevel.UNRECOGNIZED; + } +} + +export function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + case MethodOptions_IdempotencyLevel.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. 
+ */ + identifierValue: string; + positiveIntValue: number; + negativeIntValue: number; + doubleValue: number; + stringValue: Uint8Array; + aggregateValue: string; +} + +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePart { + namePart: string; + isExtension: boolean; +} + +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. 
This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[]; +} + +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition. For + * example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). 
+ */ + path: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. + * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to qux. + * // + * // Another line attached to qux. + * optional double qux = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to qux or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. * / + * /* Block comment attached to + * * grault. 
* / + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leadingComments: string; + trailingComments: string; + leadingDetachedComments: string[]; +} + +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. + */ + annotation: GeneratedCodeInfo_Annotation[]; +} + +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[]; + /** Identifies the filesystem path to the original source .proto. */ + sourceFile: string; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin: number; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified offset. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end: number; +} + +function createBaseFileDescriptorSet(): FileDescriptorSet { + return { file: [] }; +} + +export const FileDescriptorSet = { + encode(message: FileDescriptorSet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.file) { + FileDescriptorProto.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorSet { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.file.push(FileDescriptorProto.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): FileDescriptorSet { + return { file: Array.isArray(object?.file) ? object.file.map((e: any) => FileDescriptorProto.fromJSON(e)) : [] }; + }, + + toJSON(message: FileDescriptorSet): unknown { + const obj: any = {}; + if (message.file?.length) { + obj.file = message.file.map((e) => FileDescriptorProto.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): FileDescriptorSet { + return FileDescriptorSet.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): FileDescriptorSet { + const message = createBaseFileDescriptorSet(); + message.file = object.file?.map((e) => FileDescriptorProto.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFileDescriptorProto(): FileDescriptorProto { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + }; +} + +export const FileDescriptorProto = { + encode(message: FileDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.package !== "") { + writer.uint32(18).string(message.package); + } + for (const v of message.dependency) { + writer.uint32(26).string(v!); + } + writer.uint32(82).fork(); + for (const v of message.publicDependency) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(90).fork(); + for (const v of message.weakDependency) { + writer.int32(v); + } + writer.ldelim(); 
+ for (const v of message.messageType) { + DescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const v of message.service) { + ServiceDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(58).fork()).ldelim(); + } + if (message.options !== undefined) { + FileOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.sourceCodeInfo !== undefined) { + SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(74).fork()).ldelim(); + } + if (message.syntax !== "") { + writer.uint32(98).string(message.syntax); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorProto { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFileDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.package = reader.string(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.dependency.push(reader.string()); + continue; + case 10: + if (tag === 80) { + message.publicDependency.push(reader.int32()); + + continue; + } + + if (tag === 82) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.publicDependency.push(reader.int32()); + } + + continue; + } + + break; + case 11: + if (tag === 88) { + message.weakDependency.push(reader.int32()); + + continue; + } + + if (tag === 90) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.weakDependency.push(reader.int32()); + } + + continue; + } + + 
break; + case 4: + if (tag !== 34) { + break; + } + + message.messageType.push(DescriptorProto.decode(reader, reader.uint32())); + continue; + case 5: + if (tag !== 42) { + break; + } + + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + continue; + case 6: + if (tag !== 50) { + break; + } + + message.service.push(ServiceDescriptorProto.decode(reader, reader.uint32())); + continue; + case 7: + if (tag !== 58) { + break; + } + + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + continue; + case 8: + if (tag !== 66) { + break; + } + + message.options = FileOptions.decode(reader, reader.uint32()); + continue; + case 9: + if (tag !== 74) { + break; + } + + message.sourceCodeInfo = SourceCodeInfo.decode(reader, reader.uint32()); + continue; + case 12: + if (tag !== 98) { + break; + } + + message.syntax = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): FileDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + package: isSet(object.package) ? String(object.package) : "", + dependency: Array.isArray(object?.dependency) ? object.dependency.map((e: any) => String(e)) : [], + publicDependency: Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e: any) => Number(e)) + : [], + weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e: any) => Number(e)) : [], + messageType: Array.isArray(object?.messageType) + ? object.messageType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + service: Array.isArray(object?.service) ? object.service.map((e: any) => ServiceDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? 
object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) ? SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, + syntax: isSet(object.syntax) ? String(object.syntax) : "", + }; + }, + + toJSON(message: FileDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.package !== "") { + obj.package = message.package; + } + if (message.dependency?.length) { + obj.dependency = message.dependency; + } + if (message.publicDependency?.length) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } + if (message.weakDependency?.length) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } + if (message.messageType?.length) { + obj.messageType = message.messageType.map((e) => DescriptorProto.toJSON(e)); + } + if (message.enumType?.length) { + obj.enumType = message.enumType.map((e) => EnumDescriptorProto.toJSON(e)); + } + if (message.service?.length) { + obj.service = message.service.map((e) => ServiceDescriptorProto.toJSON(e)); + } + if (message.extension?.length) { + obj.extension = message.extension.map((e) => FieldDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = FileOptions.toJSON(message.options); + } + if (message.sourceCodeInfo !== undefined) { + obj.sourceCodeInfo = SourceCodeInfo.toJSON(message.sourceCodeInfo); + } + if (message.syntax !== "") { + obj.syntax = message.syntax; + } + return obj; + }, + + create, I>>(base?: I): FileDescriptorProto { + return FileDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): FileDescriptorProto { + const message = createBaseFileDescriptorProto(); + message.name = object.name ?? ""; + message.package = object.package ?? 
""; + message.dependency = object.dependency?.map((e) => e) || []; + message.publicDependency = object.publicDependency?.map((e) => e) || []; + message.weakDependency = object.weakDependency?.map((e) => e) || []; + message.messageType = object.messageType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.service = object.service?.map((e) => ServiceDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? FileOptions.fromPartial(object.options) + : undefined; + message.sourceCodeInfo = (object.sourceCodeInfo !== undefined && object.sourceCodeInfo !== null) + ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) + : undefined; + message.syntax = object.syntax ?? ""; + return message; + }, +}; + +function createBaseDescriptorProto(): DescriptorProto { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + }; +} + +export const DescriptorProto = { + encode(message: DescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.field) { + FieldDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.nestedType) { + DescriptorProto.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.extensionRange) { + DescriptorProto_ExtensionRange.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const 
v of message.oneofDecl) { + OneofDescriptorProto.encode(v!, writer.uint32(66).fork()).ldelim(); + } + if (message.options !== undefined) { + MessageOptions.encode(message.options, writer.uint32(58).fork()).ldelim(); + } + for (const v of message.reservedRange) { + DescriptorProto_ReservedRange.encode(v!, writer.uint32(74).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(82).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.field.push(FieldDescriptorProto.decode(reader, reader.uint32())); + continue; + case 6: + if (tag !== 50) { + break; + } + + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.nestedType.push(DescriptorProto.decode(reader, reader.uint32())); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + continue; + case 5: + if (tag !== 42) { + break; + } + + message.extensionRange.push(DescriptorProto_ExtensionRange.decode(reader, reader.uint32())); + continue; + case 8: + if (tag !== 66) { + break; + } + + message.oneofDecl.push(OneofDescriptorProto.decode(reader, reader.uint32())); + continue; + case 7: + if (tag !== 58) { + break; + } + + message.options = MessageOptions.decode(reader, reader.uint32()); + continue; + case 9: + if (tag !== 74) { + break; + } + + message.reservedRange.push(DescriptorProto_ReservedRange.decode(reader, reader.uint32())); + continue; + 
case 10: + if (tag !== 82) { + break; + } + + message.reservedName.push(reader.string()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): DescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + field: Array.isArray(object?.field) ? object.field.map((e: any) => FieldDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: Array.isArray(object?.nestedType) + ? object.nestedType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + extensionRange: Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e: any) => DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e: any) => OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? MessageOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? 
object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: DescriptorProto): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.field?.length) { + obj.field = message.field.map((e) => FieldDescriptorProto.toJSON(e)); + } + if (message.extension?.length) { + obj.extension = message.extension.map((e) => FieldDescriptorProto.toJSON(e)); + } + if (message.nestedType?.length) { + obj.nestedType = message.nestedType.map((e) => DescriptorProto.toJSON(e)); + } + if (message.enumType?.length) { + obj.enumType = message.enumType.map((e) => EnumDescriptorProto.toJSON(e)); + } + if (message.extensionRange?.length) { + obj.extensionRange = message.extensionRange.map((e) => DescriptorProto_ExtensionRange.toJSON(e)); + } + if (message.oneofDecl?.length) { + obj.oneofDecl = message.oneofDecl.map((e) => OneofDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = MessageOptions.toJSON(message.options); + } + if (message.reservedRange?.length) { + obj.reservedRange = message.reservedRange.map((e) => DescriptorProto_ReservedRange.toJSON(e)); + } + if (message.reservedName?.length) { + obj.reservedName = message.reservedName; + } + return obj; + }, + + create, I>>(base?: I): DescriptorProto { + return DescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): DescriptorProto { + const message = createBaseDescriptorProto(); + message.name = object.name ?? 
""; + message.field = object.field?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.nestedType = object.nestedType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.extensionRange = object.extensionRange?.map((e) => DescriptorProto_ExtensionRange.fromPartial(e)) || []; + message.oneofDecl = object.oneofDecl?.map((e) => OneofDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? MessageOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => DescriptorProto_ReservedRange.fromPartial(e)) || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseDescriptorProto_ExtensionRange(): DescriptorProto_ExtensionRange { + return { start: 0, end: 0, options: undefined }; +} + +export const DescriptorProto_ExtensionRange = { + encode(message: DescriptorProto_ExtensionRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + if (message.options !== undefined) { + ExtensionRangeOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ExtensionRange { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ExtensionRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.start = reader.int32(); + continue; + case 2: + if (tag !== 16) { + break; + } + + message.end = reader.int32(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.options = ExtensionRangeOptions.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ExtensionRange { + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + options: isSet(object.options) ? ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: DescriptorProto_ExtensionRange): unknown { + const obj: any = {}; + if (message.start !== 0) { + obj.start = Math.round(message.start); + } + if (message.end !== 0) { + obj.end = Math.round(message.end); + } + if (message.options !== undefined) { + obj.options = ExtensionRangeOptions.toJSON(message.options); + } + return obj; + }, + + create, I>>(base?: I): DescriptorProto_ExtensionRange { + return DescriptorProto_ExtensionRange.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): DescriptorProto_ExtensionRange { + const message = createBaseDescriptorProto_ExtensionRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? 
ExtensionRangeOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseDescriptorProto_ReservedRange(): DescriptorProto_ReservedRange { + return { start: 0, end: 0 }; +} + +export const DescriptorProto_ReservedRange = { + encode(message: DescriptorProto_ReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ReservedRange { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.start = reader.int32(); + continue; + case 2: + if (tag !== 16) { + break; + } + + message.end = reader.int32(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + + toJSON(message: DescriptorProto_ReservedRange): unknown { + const obj: any = {}; + if (message.start !== 0) { + obj.start = Math.round(message.start); + } + if (message.end !== 0) { + obj.end = Math.round(message.end); + } + return obj; + }, + + create, I>>(base?: I): DescriptorProto_ReservedRange { + return DescriptorProto_ReservedRange.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): DescriptorProto_ReservedRange { + const message = createBaseDescriptorProto_ReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +function createBaseExtensionRangeOptions(): ExtensionRangeOptions { + return { uninterpretedOption: [] }; +} + +export const ExtensionRangeOptions = { + encode(message: ExtensionRangeOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ExtensionRangeOptions { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ExtensionRangeOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): ExtensionRangeOptions { + return ExtensionRangeOptions.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} + +export const FieldDescriptorProto = { + encode(message: FieldDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(24).int32(message.number); + } + if (message.label !== 1) { + writer.uint32(32).int32(message.label); + } + if (message.type !== 1) { + writer.uint32(40).int32(message.type); + } + if (message.typeName !== "") { + writer.uint32(50).string(message.typeName); + } + if (message.extendee !== "") { + writer.uint32(18).string(message.extendee); + } + if (message.defaultValue !== "") { + writer.uint32(58).string(message.defaultValue); + } + if (message.oneofIndex !== 0) { + writer.uint32(72).int32(message.oneofIndex); + } + if (message.jsonName !== "") { + writer.uint32(82).string(message.jsonName); + } + if (message.options !== undefined) { + FieldOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.proto3Optional === true) { + writer.uint32(136).bool(message.proto3Optional); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldDescriptorProto { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + case 3: + if (tag !== 24) { + break; + } + + message.number = reader.int32(); + continue; + case 4: + if (tag !== 32) { + break; + } + + message.label = reader.int32() as any; + continue; + case 5: + if (tag !== 40) { + break; + } + + message.type = reader.int32() as any; + continue; + case 6: + if (tag !== 50) { + break; + } + + message.typeName = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.extendee = reader.string(); + continue; + case 7: + if (tag !== 58) { + break; + } + + message.defaultValue = reader.string(); + continue; + case 9: + if (tag !== 72) { + break; + } + + message.oneofIndex = reader.int32(); + continue; + case 10: + if (tag !== 82) { + break; + } + + message.jsonName = reader.string(); + continue; + case 8: + if (tag !== 66) { + break; + } + + message.options = FieldOptions.decode(reader, reader.uint32()); + continue; + case 17: + if (tag !== 136) { + break; + } + + message.proto3Optional = reader.bool(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): FieldDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) ? String(object.typeName) : "", + extendee: isSet(object.extendee) ? String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "", + oneofIndex: isSet(object.oneofIndex) ? 
Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? String(object.jsonName) : "", + options: isSet(object.options) ? FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) ? Boolean(object.proto3Optional) : false, + }; + }, + + toJSON(message: FieldDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.number !== 0) { + obj.number = Math.round(message.number); + } + if (message.label !== 1) { + obj.label = fieldDescriptorProto_LabelToJSON(message.label); + } + if (message.type !== 1) { + obj.type = fieldDescriptorProto_TypeToJSON(message.type); + } + if (message.typeName !== "") { + obj.typeName = message.typeName; + } + if (message.extendee !== "") { + obj.extendee = message.extendee; + } + if (message.defaultValue !== "") { + obj.defaultValue = message.defaultValue; + } + if (message.oneofIndex !== 0) { + obj.oneofIndex = Math.round(message.oneofIndex); + } + if (message.jsonName !== "") { + obj.jsonName = message.jsonName; + } + if (message.options !== undefined) { + obj.options = FieldOptions.toJSON(message.options); + } + if (message.proto3Optional === true) { + obj.proto3Optional = message.proto3Optional; + } + return obj; + }, + + create, I>>(base?: I): FieldDescriptorProto { + return FieldDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.label = object.label ?? 1; + message.type = object.type ?? 1; + message.typeName = object.typeName ?? ""; + message.extendee = object.extendee ?? ""; + message.defaultValue = object.defaultValue ?? ""; + message.oneofIndex = object.oneofIndex ?? 0; + message.jsonName = object.jsonName ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? 
FieldOptions.fromPartial(object.options) + : undefined; + message.proto3Optional = object.proto3Optional ?? false; + return message; + }, +}; + +function createBaseOneofDescriptorProto(): OneofDescriptorProto { + return { name: "", options: undefined }; +} + +export const OneofDescriptorProto = { + encode(message: OneofDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.options !== undefined) { + OneofOptions.encode(message.options, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofDescriptorProto { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.options = OneofOptions.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): OneofDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + options: isSet(object.options) ? OneofOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: OneofDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.options !== undefined) { + obj.options = OneofOptions.toJSON(message.options); + } + return obj; + }, + + create, I>>(base?: I): OneofDescriptorProto { + return OneofDescriptorProto.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto(); + message.name = object.name ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? OneofOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseEnumDescriptorProto(): EnumDescriptorProto { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] }; +} + +export const EnumDescriptorProto = { + encode(message: EnumDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.value) { + EnumValueDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + EnumOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.reservedRange) { + EnumDescriptorProto_EnumReservedRange.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(42).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.value.push(EnumValueDescriptorProto.decode(reader, reader.uint32())); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.options = EnumOptions.decode(reader, reader.uint32()); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.reservedRange.push(EnumDescriptorProto_EnumReservedRange.decode(reader, reader.uint32())); + continue; + case 5: + if (tag !== 42) { + break; + } + + message.reservedName.push(reader.string()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + value: Array.isArray(object?.value) ? object.value.map((e: any) => EnumValueDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? EnumOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? 
object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: EnumDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.value?.length) { + obj.value = message.value.map((e) => EnumValueDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = EnumOptions.toJSON(message.options); + } + if (message.reservedRange?.length) { + obj.reservedRange = message.reservedRange.map((e) => EnumDescriptorProto_EnumReservedRange.toJSON(e)); + } + if (message.reservedName?.length) { + obj.reservedName = message.reservedName; + } + return obj; + }, + + create, I>>(base?: I): EnumDescriptorProto { + return EnumDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto(); + message.name = object.name ?? ""; + message.value = object.value?.map((e) => EnumValueDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? EnumOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => EnumDescriptorProto_EnumReservedRange.fromPartial(e)) || + []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseEnumDescriptorProto_EnumReservedRange(): EnumDescriptorProto_EnumReservedRange { + return { start: 0, end: 0 }; +} + +export const EnumDescriptorProto_EnumReservedRange = { + encode(message: EnumDescriptorProto_EnumReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto_EnumReservedRange { + const reader = input instanceof _m0.Reader ? 
input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.start = reader.int32(); + continue; + case 2: + if (tag !== 16) { + break; + } + + message.end = reader.int32(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto_EnumReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + + toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown { + const obj: any = {}; + if (message.start !== 0) { + obj.start = Math.round(message.start); + } + if (message.end !== 0) { + obj.end = Math.round(message.end); + } + return obj; + }, + + create, I>>( + base?: I, + ): EnumDescriptorProto_EnumReservedRange { + return EnumDescriptorProto_EnumReservedRange.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>( + object: I, + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +function createBaseEnumValueDescriptorProto(): EnumValueDescriptorProto { + return { name: "", number: 0, options: undefined }; +} + +export const EnumValueDescriptorProto = { + encode(message: EnumValueDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(16).int32(message.number); + } + if (message.options !== undefined) { + EnumValueOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueDescriptorProto { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + case 2: + if (tag !== 16) { + break; + } + + message.number = reader.int32(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.options = EnumValueOptions.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumValueDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + options: isSet(object.options) ? 
EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: EnumValueDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.number !== 0) { + obj.number = Math.round(message.number); + } + if (message.options !== undefined) { + obj.options = EnumValueOptions.toJSON(message.options); + } + return obj; + }, + + create, I>>(base?: I): EnumValueDescriptorProto { + return EnumValueDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? EnumValueOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseServiceDescriptorProto(): ServiceDescriptorProto { + return { name: "", method: [], options: undefined }; +} + +export const ServiceDescriptorProto = { + encode(message: ServiceDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.method) { + MethodDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + ServiceOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceDescriptorProto { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseServiceDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.method.push(MethodDescriptorProto.decode(reader, reader.uint32())); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.options = ServiceOptions.decode(reader, reader.uint32()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): ServiceDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + method: Array.isArray(object?.method) ? object.method.map((e: any) => MethodDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: ServiceDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.method?.length) { + obj.method = message.method.map((e) => MethodDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = ServiceOptions.toJSON(message.options); + } + return obj; + }, + + create, I>>(base?: I): ServiceDescriptorProto { + return ServiceDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto(); + message.name = object.name ?? ""; + message.method = object.method?.map((e) => MethodDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
ServiceOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseMethodDescriptorProto(): MethodDescriptorProto { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} + +export const MethodDescriptorProto = { + encode(message: MethodDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.inputType !== "") { + writer.uint32(18).string(message.inputType); + } + if (message.outputType !== "") { + writer.uint32(26).string(message.outputType); + } + if (message.options !== undefined) { + MethodOptions.encode(message.options, writer.uint32(34).fork()).ldelim(); + } + if (message.clientStreaming === true) { + writer.uint32(40).bool(message.clientStreaming); + } + if (message.serverStreaming === true) { + writer.uint32(48).bool(message.serverStreaming); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodDescriptorProto { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMethodDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.inputType = reader.string(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.outputType = reader.string(); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.options = MethodOptions.decode(reader, reader.uint32()); + continue; + case 5: + if (tag !== 40) { + break; + } + + message.clientStreaming = reader.bool(); + continue; + case 6: + if (tag !== 48) { + break; + } + + message.serverStreaming = reader.bool(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): MethodDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + inputType: isSet(object.inputType) ? String(object.inputType) : "", + outputType: isSet(object.outputType) ? String(object.outputType) : "", + options: isSet(object.options) ? MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false, + serverStreaming: isSet(object.serverStreaming) ? 
Boolean(object.serverStreaming) : false, + }; + }, + + toJSON(message: MethodDescriptorProto): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.inputType !== "") { + obj.inputType = message.inputType; + } + if (message.outputType !== "") { + obj.outputType = message.outputType; + } + if (message.options !== undefined) { + obj.options = MethodOptions.toJSON(message.options); + } + if (message.clientStreaming === true) { + obj.clientStreaming = message.clientStreaming; + } + if (message.serverStreaming === true) { + obj.serverStreaming = message.serverStreaming; + } + return obj; + }, + + create, I>>(base?: I): MethodDescriptorProto { + return MethodDescriptorProto.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto(); + message.name = object.name ?? ""; + message.inputType = object.inputType ?? ""; + message.outputType = object.outputType ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? MethodOptions.fromPartial(object.options) + : undefined; + message.clientStreaming = object.clientStreaming ?? false; + message.serverStreaming = object.serverStreaming ?? 
false; + return message; + }, +}; + +function createBaseFileOptions(): FileOptions { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + uninterpretedOption: [], + }; +} + +export const FileOptions = { + encode(message: FileOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.javaPackage !== "") { + writer.uint32(10).string(message.javaPackage); + } + if (message.javaOuterClassname !== "") { + writer.uint32(66).string(message.javaOuterClassname); + } + if (message.javaMultipleFiles === true) { + writer.uint32(80).bool(message.javaMultipleFiles); + } + if (message.javaGenerateEqualsAndHash === true) { + writer.uint32(160).bool(message.javaGenerateEqualsAndHash); + } + if (message.javaStringCheckUtf8 === true) { + writer.uint32(216).bool(message.javaStringCheckUtf8); + } + if (message.optimizeFor !== 1) { + writer.uint32(72).int32(message.optimizeFor); + } + if (message.goPackage !== "") { + writer.uint32(90).string(message.goPackage); + } + if (message.ccGenericServices === true) { + writer.uint32(128).bool(message.ccGenericServices); + } + if (message.javaGenericServices === true) { + writer.uint32(136).bool(message.javaGenericServices); + } + if (message.pyGenericServices === true) { + writer.uint32(144).bool(message.pyGenericServices); + } + if (message.phpGenericServices === true) { + writer.uint32(336).bool(message.phpGenericServices); + } + if (message.deprecated === true) { + writer.uint32(184).bool(message.deprecated); + } + if (message.ccEnableArenas === true) { + 
writer.uint32(248).bool(message.ccEnableArenas); + } + if (message.objcClassPrefix !== "") { + writer.uint32(290).string(message.objcClassPrefix); + } + if (message.csharpNamespace !== "") { + writer.uint32(298).string(message.csharpNamespace); + } + if (message.swiftPrefix !== "") { + writer.uint32(314).string(message.swiftPrefix); + } + if (message.phpClassPrefix !== "") { + writer.uint32(322).string(message.phpClassPrefix); + } + if (message.phpNamespace !== "") { + writer.uint32(330).string(message.phpNamespace); + } + if (message.phpMetadataNamespace !== "") { + writer.uint32(354).string(message.phpMetadataNamespace); + } + if (message.rubyPackage !== "") { + writer.uint32(362).string(message.rubyPackage); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileOptions { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.javaPackage = reader.string(); + continue; + case 8: + if (tag !== 66) { + break; + } + + message.javaOuterClassname = reader.string(); + continue; + case 10: + if (tag !== 80) { + break; + } + + message.javaMultipleFiles = reader.bool(); + continue; + case 20: + if (tag !== 160) { + break; + } + + message.javaGenerateEqualsAndHash = reader.bool(); + continue; + case 27: + if (tag !== 216) { + break; + } + + message.javaStringCheckUtf8 = reader.bool(); + continue; + case 9: + if (tag !== 72) { + break; + } + + message.optimizeFor = reader.int32() as any; + continue; + case 11: + if (tag !== 90) { + break; + } + + message.goPackage = reader.string(); + continue; + case 16: + if (tag !== 128) { + break; + } + + message.ccGenericServices = reader.bool(); + continue; + case 17: + if (tag !== 136) { + break; + } + + message.javaGenericServices = reader.bool(); + continue; + case 18: + if (tag !== 144) { + break; + } + + message.pyGenericServices = reader.bool(); + continue; + case 42: + if (tag !== 336) { + break; + } + + message.phpGenericServices = reader.bool(); + continue; + case 23: + if (tag !== 184) { + break; + } + + message.deprecated = reader.bool(); + continue; + case 31: + if (tag !== 248) { + break; + } + + message.ccEnableArenas = reader.bool(); + continue; + case 36: + if (tag !== 290) { + break; + } + + message.objcClassPrefix = reader.string(); + continue; + case 37: + if (tag !== 298) { + break; + } + + message.csharpNamespace = reader.string(); + continue; + case 39: + if (tag !== 314) { + break; + } + + message.swiftPrefix = reader.string(); + continue; + case 40: + if (tag !== 322) { + break; + } + + message.phpClassPrefix = reader.string(); + continue; + case 41: + if (tag !== 330) { + break; + } + + message.phpNamespace = 
reader.string(); + continue; + case 44: + if (tag !== 354) { + break; + } + + message.phpMetadataNamespace = reader.string(); + continue; + case 45: + if (tag !== 362) { + break; + } + + message.rubyPackage = reader.string(); + continue; + case 999: + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): FileOptions { + return { + javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "", + javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) ? Boolean(object.javaMultipleFiles) : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false, + optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, + goPackage: isSet(object.goPackage) ? String(object.goPackage) : "", + ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false, + javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false, + pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false, + phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false, + objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "", + csharpNamespace: isSet(object.csharpNamespace) ? 
String(object.csharpNamespace) : "", + swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "", + phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "", + phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "", + rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "", + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FileOptions): unknown { + const obj: any = {}; + if (message.javaPackage !== "") { + obj.javaPackage = message.javaPackage; + } + if (message.javaOuterClassname !== "") { + obj.javaOuterClassname = message.javaOuterClassname; + } + if (message.javaMultipleFiles === true) { + obj.javaMultipleFiles = message.javaMultipleFiles; + } + if (message.javaGenerateEqualsAndHash === true) { + obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash; + } + if (message.javaStringCheckUtf8 === true) { + obj.javaStringCheckUtf8 = message.javaStringCheckUtf8; + } + if (message.optimizeFor !== 1) { + obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor); + } + if (message.goPackage !== "") { + obj.goPackage = message.goPackage; + } + if (message.ccGenericServices === true) { + obj.ccGenericServices = message.ccGenericServices; + } + if (message.javaGenericServices === true) { + obj.javaGenericServices = message.javaGenericServices; + } + if (message.pyGenericServices === true) { + obj.pyGenericServices = message.pyGenericServices; + } + if (message.phpGenericServices === true) { + obj.phpGenericServices = message.phpGenericServices; + } + if (message.deprecated === true) { + obj.deprecated = message.deprecated; + } + if (message.ccEnableArenas === true) { + obj.ccEnableArenas = message.ccEnableArenas; + } + if 
(message.objcClassPrefix !== "") { + obj.objcClassPrefix = message.objcClassPrefix; + } + if (message.csharpNamespace !== "") { + obj.csharpNamespace = message.csharpNamespace; + } + if (message.swiftPrefix !== "") { + obj.swiftPrefix = message.swiftPrefix; + } + if (message.phpClassPrefix !== "") { + obj.phpClassPrefix = message.phpClassPrefix; + } + if (message.phpNamespace !== "") { + obj.phpNamespace = message.phpNamespace; + } + if (message.phpMetadataNamespace !== "") { + obj.phpMetadataNamespace = message.phpMetadataNamespace; + } + if (message.rubyPackage !== "") { + obj.rubyPackage = message.rubyPackage; + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): FileOptions { + return FileOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): FileOptions { + const message = createBaseFileOptions(); + message.javaPackage = object.javaPackage ?? ""; + message.javaOuterClassname = object.javaOuterClassname ?? ""; + message.javaMultipleFiles = object.javaMultipleFiles ?? false; + message.javaGenerateEqualsAndHash = object.javaGenerateEqualsAndHash ?? false; + message.javaStringCheckUtf8 = object.javaStringCheckUtf8 ?? false; + message.optimizeFor = object.optimizeFor ?? 1; + message.goPackage = object.goPackage ?? ""; + message.ccGenericServices = object.ccGenericServices ?? false; + message.javaGenericServices = object.javaGenericServices ?? false; + message.pyGenericServices = object.pyGenericServices ?? false; + message.phpGenericServices = object.phpGenericServices ?? false; + message.deprecated = object.deprecated ?? false; + message.ccEnableArenas = object.ccEnableArenas ?? false; + message.objcClassPrefix = object.objcClassPrefix ?? ""; + message.csharpNamespace = object.csharpNamespace ?? ""; + message.swiftPrefix = object.swiftPrefix ?? ""; + message.phpClassPrefix = object.phpClassPrefix ?? 
""; + message.phpNamespace = object.phpNamespace ?? ""; + message.phpMetadataNamespace = object.phpMetadataNamespace ?? ""; + message.rubyPackage = object.rubyPackage ?? ""; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMessageOptions(): MessageOptions { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + uninterpretedOption: [], + }; +} + +export const MessageOptions = { + encode(message: MessageOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.messageSetWireFormat === true) { + writer.uint32(8).bool(message.messageSetWireFormat); + } + if (message.noStandardDescriptorAccessor === true) { + writer.uint32(16).bool(message.noStandardDescriptorAccessor); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.mapEntry === true) { + writer.uint32(56).bool(message.mapEntry); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MessageOptions { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMessageOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.messageSetWireFormat = reader.bool(); + continue; + case 2: + if (tag !== 16) { + break; + } + + message.noStandardDescriptorAccessor = reader.bool(); + continue; + case 3: + if (tag !== 24) { + break; + } + + message.deprecated = reader.bool(); + continue; + case 7: + if (tag !== 56) { + break; + } + + message.mapEntry = reader.bool(); + continue; + case 999: + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): MessageOptions { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) ? Boolean(object.messageSetWireFormat) : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MessageOptions): unknown { + const obj: any = {}; + if (message.messageSetWireFormat === true) { + obj.messageSetWireFormat = message.messageSetWireFormat; + } + if (message.noStandardDescriptorAccessor === true) { + obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor; + } + if (message.deprecated === true) { + obj.deprecated = message.deprecated; + } + if (message.mapEntry === true) { + obj.mapEntry = message.mapEntry; + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): MessageOptions { + return MessageOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): MessageOptions { + const message = createBaseMessageOptions(); + message.messageSetWireFormat = object.messageSetWireFormat ?? false; + message.noStandardDescriptorAccessor = object.noStandardDescriptorAccessor ?? false; + message.deprecated = object.deprecated ?? false; + message.mapEntry = object.mapEntry ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldOptions(): FieldOptions { + return { ctype: 0, packed: false, jstype: 0, lazy: false, deprecated: false, weak: false, uninterpretedOption: [] }; +} + +export const FieldOptions = { + encode(message: FieldOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ctype !== 0) { + writer.uint32(8).int32(message.ctype); + } + if (message.packed === true) { + writer.uint32(16).bool(message.packed); + } + if (message.jstype !== 0) { + writer.uint32(48).int32(message.jstype); + } + if (message.lazy === true) { + writer.uint32(40).bool(message.lazy); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.weak === true) { + writer.uint32(80).bool(message.weak); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldOptions { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.ctype = reader.int32() as any; + continue; + case 2: + if (tag !== 16) { + break; + } + + message.packed = reader.bool(); + continue; + case 6: + if (tag !== 48) { + break; + } + + message.jstype = reader.int32() as any; + continue; + case 5: + if (tag !== 40) { + break; + } + + message.lazy = reader.bool(); + continue; + case 3: + if (tag !== 24) { + break; + } + + message.deprecated = reader.bool(); + continue; + case 10: + if (tag !== 80) { + break; + } + + message.weak = reader.bool(); + continue; + case 999: + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): FieldOptions { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? Boolean(object.lazy) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? Boolean(object.weak) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FieldOptions): unknown { + const obj: any = {}; + if (message.ctype !== 0) { + obj.ctype = fieldOptions_CTypeToJSON(message.ctype); + } + if (message.packed === true) { + obj.packed = message.packed; + } + if (message.jstype !== 0) { + obj.jstype = fieldOptions_JSTypeToJSON(message.jstype); + } + if (message.lazy === true) { + obj.lazy = message.lazy; + } + if (message.deprecated === true) { + obj.deprecated = message.deprecated; + } + if (message.weak === true) { + obj.weak = message.weak; + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): FieldOptions { + return FieldOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): FieldOptions { + const message = createBaseFieldOptions(); + message.ctype = object.ctype ?? 0; + message.packed = object.packed ?? false; + message.jstype = object.jstype ?? 0; + message.lazy = object.lazy ?? false; + message.deprecated = object.deprecated ?? false; + message.weak = object.weak ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseOneofOptions(): OneofOptions { + return { uninterpretedOption: [] }; +} + +export const OneofOptions = { + encode(message: OneofOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofOptions { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseOneofOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): OneofOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: OneofOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): OneofOptions { + return OneofOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): OneofOptions { + const message = createBaseOneofOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumOptions(): EnumOptions { + return { allowAlias: false, deprecated: false, uninterpretedOption: [] }; +} + +export const EnumOptions = { + encode(message: EnumOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.allowAlias === true) { + writer.uint32(16).bool(message.allowAlias); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumOptions { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + if (tag !== 16) { + break; + } + + message.allowAlias = reader.bool(); + continue; + case 3: + if (tag !== 24) { + break; + } + + message.deprecated = reader.bool(); + continue; + case 999: + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumOptions { + return { + allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumOptions): unknown { + const obj: any = {}; + if (message.allowAlias === true) { + obj.allowAlias = message.allowAlias; + } + if (message.deprecated === true) { + obj.deprecated = message.deprecated; + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): EnumOptions { + return EnumOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): EnumOptions { + const message = createBaseEnumOptions(); + message.allowAlias = object.allowAlias ?? false; + message.deprecated = object.deprecated ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumValueOptions(): EnumValueOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const EnumValueOptions = { + encode(message: EnumValueOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(8).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueOptions { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.deprecated = reader.bool(); + continue; + case 999: + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): EnumValueOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumValueOptions): unknown { + const obj: any = {}; + if (message.deprecated === true) { + obj.deprecated = message.deprecated; + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): EnumValueOptions { + return EnumValueOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): EnumValueOptions { + const message = createBaseEnumValueOptions(); + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseServiceOptions(): ServiceOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const ServiceOptions = { + encode(message: ServiceOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceOptions { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseServiceOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + if (tag !== 264) { + break; + } + + message.deprecated = reader.bool(); + continue; + case 999: + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): ServiceOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ServiceOptions): unknown { + const obj: any = {}; + if (message.deprecated === true) { + obj.deprecated = message.deprecated; + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): ServiceOptions { + return ServiceOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): ServiceOptions { + const message = createBaseServiceOptions(); + message.deprecated = object.deprecated ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMethodOptions(): MethodOptions { + return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] }; +} + +export const MethodOptions = { + encode(message: MethodOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + if (message.idempotencyLevel !== 0) { + writer.uint32(272).int32(message.idempotencyLevel); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodOptions { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + if (tag !== 264) { + break; + } + + message.deprecated = reader.bool(); + continue; + case 34: + if (tag !== 272) { + break; + } + + message.idempotencyLevel = reader.int32() as any; + continue; + case 999: + if (tag !== 7994) { + break; + } + + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): MethodOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : 0, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MethodOptions): unknown { + const obj: any = {}; + if (message.deprecated === true) { + obj.deprecated = message.deprecated; + } + if (message.idempotencyLevel !== 0) { + obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): MethodOptions { + return MethodOptions.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): MethodOptions { + const message = createBaseMethodOptions(); + message.deprecated = object.deprecated ?? false; + message.idempotencyLevel = object.idempotencyLevel ?? 0; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseUninterpretedOption(): UninterpretedOption { + return { + name: [], + identifierValue: "", + positiveIntValue: 0, + negativeIntValue: 0, + doubleValue: 0, + stringValue: new Uint8Array(0), + aggregateValue: "", + }; +} + +export const UninterpretedOption = { + encode(message: UninterpretedOption, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.name) { + UninterpretedOption_NamePart.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.identifierValue !== "") { + writer.uint32(26).string(message.identifierValue); + } + if (message.positiveIntValue !== 0) { + writer.uint32(32).uint64(message.positiveIntValue); + } + if (message.negativeIntValue !== 0) { + writer.uint32(40).int64(message.negativeIntValue); + } + if (message.doubleValue !== 0) { + writer.uint32(49).double(message.doubleValue); + } + if (message.stringValue.length !== 0) { + writer.uint32(58).bytes(message.stringValue); + } + if (message.aggregateValue !== "") { + 
writer.uint32(66).string(message.aggregateValue); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + if (tag !== 18) { + break; + } + + message.name.push(UninterpretedOption_NamePart.decode(reader, reader.uint32())); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.identifierValue = reader.string(); + continue; + case 4: + if (tag !== 32) { + break; + } + + message.positiveIntValue = longToNumber(reader.uint64() as Long); + continue; + case 5: + if (tag !== 40) { + break; + } + + message.negativeIntValue = longToNumber(reader.int64() as Long); + continue; + case 6: + if (tag !== 49) { + break; + } + + message.doubleValue = reader.double(); + continue; + case 7: + if (tag !== 58) { + break; + } + + message.stringValue = reader.bytes(); + continue; + case 8: + if (tag !== 66) { + break; + } + + message.aggregateValue = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): UninterpretedOption { + return { + name: Array.isArray(object?.name) ? object.name.map((e: any) => UninterpretedOption_NamePart.fromJSON(e)) : [], + identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "", + positiveIntValue: isSet(object.positiveIntValue) ? Number(object.positiveIntValue) : 0, + negativeIntValue: isSet(object.negativeIntValue) ? Number(object.negativeIntValue) : 0, + doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) ? 
bytesFromBase64(object.stringValue) : new Uint8Array(0), + aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "", + }; + }, + + toJSON(message: UninterpretedOption): unknown { + const obj: any = {}; + if (message.name?.length) { + obj.name = message.name.map((e) => UninterpretedOption_NamePart.toJSON(e)); + } + if (message.identifierValue !== "") { + obj.identifierValue = message.identifierValue; + } + if (message.positiveIntValue !== 0) { + obj.positiveIntValue = Math.round(message.positiveIntValue); + } + if (message.negativeIntValue !== 0) { + obj.negativeIntValue = Math.round(message.negativeIntValue); + } + if (message.doubleValue !== 0) { + obj.doubleValue = message.doubleValue; + } + if (message.stringValue.length !== 0) { + obj.stringValue = base64FromBytes(message.stringValue); + } + if (message.aggregateValue !== "") { + obj.aggregateValue = message.aggregateValue; + } + return obj; + }, + + create, I>>(base?: I): UninterpretedOption { + return UninterpretedOption.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): UninterpretedOption { + const message = createBaseUninterpretedOption(); + message.name = object.name?.map((e) => UninterpretedOption_NamePart.fromPartial(e)) || []; + message.identifierValue = object.identifierValue ?? ""; + message.positiveIntValue = object.positiveIntValue ?? 0; + message.negativeIntValue = object.negativeIntValue ?? 0; + message.doubleValue = object.doubleValue ?? 0; + message.stringValue = object.stringValue ?? new Uint8Array(0); + message.aggregateValue = object.aggregateValue ?? 
""; + return message; + }, +}; + +function createBaseUninterpretedOption_NamePart(): UninterpretedOption_NamePart { + return { namePart: "", isExtension: false }; +} + +export const UninterpretedOption_NamePart = { + encode(message: UninterpretedOption_NamePart, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.namePart !== "") { + writer.uint32(10).string(message.namePart); + } + if (message.isExtension === true) { + writer.uint32(16).bool(message.isExtension); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption_NamePart { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption_NamePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.namePart = reader.string(); + continue; + case 2: + if (tag !== 16) { + break; + } + + message.isExtension = reader.bool(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): UninterpretedOption_NamePart { + return { + namePart: isSet(object.namePart) ? String(object.namePart) : "", + isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false, + }; + }, + + toJSON(message: UninterpretedOption_NamePart): unknown { + const obj: any = {}; + if (message.namePart !== "") { + obj.namePart = message.namePart; + } + if (message.isExtension === true) { + obj.isExtension = message.isExtension; + } + return obj; + }, + + create, I>>(base?: I): UninterpretedOption_NamePart { + return UninterpretedOption_NamePart.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart(); + message.namePart = object.namePart ?? 
""; + message.isExtension = object.isExtension ?? false; + return message; + }, +}; + +function createBaseSourceCodeInfo(): SourceCodeInfo { + return { location: [] }; +} + +export const SourceCodeInfo = { + encode(message: SourceCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.location) { + SourceCodeInfo_Location.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.location.push(SourceCodeInfo_Location.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo { + return { + location: Array.isArray(object?.location) + ? object.location.map((e: any) => SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo): unknown { + const obj: any = {}; + if (message.location?.length) { + obj.location = message.location.map((e) => SourceCodeInfo_Location.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): SourceCodeInfo { + return SourceCodeInfo.fromPartial(base ?? 
({} as any)); + }, + fromPartial, I>>(object: I): SourceCodeInfo { + const message = createBaseSourceCodeInfo(); + message.location = object.location?.map((e) => SourceCodeInfo_Location.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSourceCodeInfo_Location(): SourceCodeInfo_Location { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} + +export const SourceCodeInfo_Location = { + encode(message: SourceCodeInfo_Location, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(18).fork(); + for (const v of message.span) { + writer.int32(v); + } + writer.ldelim(); + if (message.leadingComments !== "") { + writer.uint32(26).string(message.leadingComments); + } + if (message.trailingComments !== "") { + writer.uint32(34).string(message.trailingComments); + } + for (const v of message.leadingDetachedComments) { + writer.uint32(50).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo_Location { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo_Location(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag === 8) { + message.path.push(reader.int32()); + + continue; + } + + if (tag === 10) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + + continue; + } + + break; + case 2: + if (tag === 16) { + message.span.push(reader.int32()); + + continue; + } + + if (tag === 18) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.span.push(reader.int32()); + } + + continue; + } + + break; + case 3: + if (tag !== 26) { + break; + } + + message.leadingComments = reader.string(); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.trailingComments = reader.string(); + continue; + case 6: + if (tag !== 50) { + break; + } + + message.leadingDetachedComments.push(reader.string()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo_Location { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + span: Array.isArray(object?.span) ? object.span.map((e: any) => Number(e)) : [], + leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "", + trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "", + leadingDetachedComments: Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e: any) => String(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo_Location): unknown { + const obj: any = {}; + if (message.path?.length) { + obj.path = message.path.map((e) => Math.round(e)); + } + if (message.span?.length) { + obj.span = message.span.map((e) => Math.round(e)); + } + if (message.leadingComments !== "") { + obj.leadingComments = message.leadingComments; + } + if (message.trailingComments !== "") { + obj.trailingComments = message.trailingComments; + } + if (message.leadingDetachedComments?.length) { + obj.leadingDetachedComments = message.leadingDetachedComments; + } + return obj; + }, + + create, I>>(base?: I): SourceCodeInfo_Location { + return SourceCodeInfo_Location.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location(); + message.path = object.path?.map((e) => e) || []; + message.span = object.span?.map((e) => e) || []; + message.leadingComments = object.leadingComments ?? ""; + message.trailingComments = object.trailingComments ?? ""; + message.leadingDetachedComments = object.leadingDetachedComments?.map((e) => e) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo(): GeneratedCodeInfo { + return { annotation: [] }; +} + +export const GeneratedCodeInfo = { + encode(message: GeneratedCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.annotation) { + GeneratedCodeInfo_Annotation.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.annotation.push(GeneratedCodeInfo_Annotation.decode(reader, reader.uint32())); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo { + return { + annotation: Array.isArray(object?.annotation) + ? object.annotation.map((e: any) => GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GeneratedCodeInfo): unknown { + const obj: any = {}; + if (message.annotation?.length) { + obj.annotation = message.annotation.map((e) => GeneratedCodeInfo_Annotation.toJSON(e)); + } + return obj; + }, + + create, I>>(base?: I): GeneratedCodeInfo { + return GeneratedCodeInfo.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo(); + message.annotation = object.annotation?.map((e) => GeneratedCodeInfo_Annotation.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo_Annotation(): GeneratedCodeInfo_Annotation { + return { path: [], sourceFile: "", begin: 0, end: 0 }; +} + +export const GeneratedCodeInfo_Annotation = { + encode(message: GeneratedCodeInfo_Annotation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + if (message.sourceFile !== "") { + writer.uint32(18).string(message.sourceFile); + } + if (message.begin !== 0) { + writer.uint32(24).int32(message.begin); + } + if (message.end !== 0) { + writer.uint32(32).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo_Annotation { + const reader = input instanceof _m0.Reader ? 
input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo_Annotation(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag === 8) { + message.path.push(reader.int32()); + + continue; + } + + if (tag === 10) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + + continue; + } + + break; + case 2: + if (tag !== 18) { + break; + } + + message.sourceFile = reader.string(); + continue; + case 3: + if (tag !== 24) { + break; + } + + message.begin = reader.int32(); + continue; + case 4: + if (tag !== 32) { + break; + } + + message.end = reader.int32(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo_Annotation { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "", + begin: isSet(object.begin) ? Number(object.begin) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + }; + }, + + toJSON(message: GeneratedCodeInfo_Annotation): unknown { + const obj: any = {}; + if (message.path?.length) { + obj.path = message.path.map((e) => Math.round(e)); + } + if (message.sourceFile !== "") { + obj.sourceFile = message.sourceFile; + } + if (message.begin !== 0) { + obj.begin = Math.round(message.begin); + } + if (message.end !== 0) { + obj.end = Math.round(message.end); + } + return obj; + }, + + create, I>>(base?: I): GeneratedCodeInfo_Annotation { + return GeneratedCodeInfo_Annotation.fromPartial(base ?? ({} as any)); + }, + fromPartial, I>>(object: I): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation(); + message.path = object.path?.map((e) => e) || []; + message.sourceFile = object.sourceFile ?? 
""; + message.begin = object.begin ?? 0; + message.end = object.end ?? 0; + return message; + }, +}; + +declare const self: any | undefined; +declare const window: any | undefined; +declare const global: any | undefined; +const tsProtoGlobalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (tsProtoGlobalThis.Buffer) { + return Uint8Array.from(tsProtoGlobalThis.Buffer.from(b64, "base64")); + } else { + const bin = tsProtoGlobalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (tsProtoGlobalThis.Buffer) { + return tsProtoGlobalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return tsProtoGlobalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new tsProtoGlobalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.params.v1beta1/api.swagger.yml b/ts-client/cosmos.params.v1beta1/api.swagger.yml new file mode 100644 index 000000000..901cf36dd --- /dev/null +++ b/ts-client/cosmos.params.v1beta1/api.swagger.yml @@ -0,0 +1,160 @@ +swagger: '2.0' +info: + title: HTTP API Console cosmos.params.v1beta1 + name: '' + description: '' +paths: + /cosmos/params/v1beta1/params: + get: + operationId: Params + responses: + '200': + description: A successful response. + schema: + type: object + properties: + param: + type: object + properties: + subspace: + type: string + key: + type: string + value: + type: string + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: subspace + in: query + required: false + type: string + - name: key + in: query + required: false + type: string + tags: + - Query + /cosmos/params/v1beta1/subspaces: + get: + operationId: Subspaces + responses: + '200': + description: A successful response. + schema: + type: object + properties: + subspaces: + type: array + items: + type: object + properties: + subspace: + type: string + keys: + type: array + items: + type: string + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + tags: + - Query +definitions: + Any: + type: object + properties: + '@type': + type: string + additionalProperties: {} + Status: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + ParamChange: + type: object + properties: + subspace: + type: string + key: + type: string + value: + type: string + QueryParamsResponse: + type: object + properties: + param: + type: object + properties: + subspace: + type: string + key: + type: string + value: + type: string + QuerySubspacesResponse: + type: object + properties: + subspaces: + type: array + items: + type: object + properties: + subspace: + type: string + keys: + type: array + items: + type: string + Subspace: + type: object + properties: + subspace: + type: string + keys: + type: array + items: + type: string diff --git a/ts-client/cosmos.params.v1beta1/index.ts b/ts-client/cosmos.params.v1beta1/index.ts new file mode 100755 index 000000000..c9dfa159e --- /dev/null +++ b/ts-client/cosmos.params.v1beta1/index.ts @@ -0,0 +1,6 @@ +import Module from './module'; +import { txClient, queryClient, registry } from './module'; +import { msgTypes } from './registry'; + +export * from "./types"; +export { Module, msgTypes, txClient, queryClient, registry }; diff --git a/ts-client/cosmos.params.v1beta1/module.ts b/ts-client/cosmos.params.v1beta1/module.ts new file mode 100755 index 000000000..69c0ccf9e --- /dev/null +++ b/ts-client/cosmos.params.v1beta1/module.ts @@ -0,0 +1,102 @@ +// Generated by Ignite ignite.com/cli + +import { StdFee } from "@cosmjs/launchpad"; +import { SigningStargateClient, DeliverTxResponse } from "@cosmjs/stargate"; +import { 
EncodeObject, GeneratedType, OfflineSigner, Registry } from "@cosmjs/proto-signing"; +import { msgTypes } from './registry'; +import { IgniteClient } from "../client" +import { MissingWalletError } from "../helpers" +import { Api } from "./rest"; + +import { ParameterChangeProposal as typeParameterChangeProposal} from "./types" +import { ParamChange as typeParamChange} from "./types" +import { Subspace as typeSubspace} from "./types" + +export { }; + + + +export const registry = new Registry(msgTypes); + +type Field = { + name: string; + type: unknown; +} +function getStructure(template) { + const structure: {fields: Field[]} = { fields: [] } + for (let [key, value] of Object.entries(template)) { + let field = { name: key, type: typeof value } + structure.fields.push(field) + } + return structure +} +const defaultFee = { + amount: [], + gas: "200000", +}; + +interface TxClientOptions { + addr: string + prefix: string + signer?: OfflineSigner +} + +export const txClient = ({ signer, prefix, addr }: TxClientOptions = { addr: "http://localhost:26657", prefix: "cosmos" }) => { + + return { + + + } +}; + +interface QueryClientOptions { + addr: string +} + +export const queryClient = ({ addr: addr }: QueryClientOptions = { addr: "http://localhost:1317" }) => { + return new Api({ baseURL: addr }); +}; + +class SDKModule { + public query: ReturnType; + public tx: ReturnType; + public structure: Record; + public registry: Array<[string, GeneratedType]> = []; + + constructor(client: IgniteClient) { + + this.query = queryClient({ addr: client.env.apiURL }); + this.updateTX(client); + this.structure = { + ParameterChangeProposal: getStructure(typeParameterChangeProposal.fromPartial({})), + ParamChange: getStructure(typeParamChange.fromPartial({})), + Subspace: getStructure(typeSubspace.fromPartial({})), + + }; + client.on('signer-changed',(signer) => { + this.updateTX(client); + }) + } + updateTX(client: IgniteClient) { + const methods = txClient({ + signer: client.signer, + 
addr: client.env.rpcURL, + prefix: client.env.prefix ?? "cosmos", + }) + + this.tx = methods; + for (let m in methods) { + this.tx[m] = methods[m].bind(this.tx); + } + } +}; + +const Module = (test: IgniteClient) => { + return { + module: { + CosmosParamsV1Beta1: new SDKModule(test) + }, + registry: msgTypes + } +} +export default Module; \ No newline at end of file diff --git a/ts-client/cosmos.params.v1beta1/registry.ts b/ts-client/cosmos.params.v1beta1/registry.ts new file mode 100755 index 000000000..26157e4ff --- /dev/null +++ b/ts-client/cosmos.params.v1beta1/registry.ts @@ -0,0 +1,7 @@ +import { GeneratedType } from "@cosmjs/proto-signing"; + +const msgTypes: Array<[string, GeneratedType]> = [ + +]; + +export { msgTypes } \ No newline at end of file diff --git a/ts-client/cosmos.params.v1beta1/rest.ts b/ts-client/cosmos.params.v1beta1/rest.ts new file mode 100644 index 000000000..21441b39f --- /dev/null +++ b/ts-client/cosmos.params.v1beta1/rest.ts @@ -0,0 +1,220 @@ +/* eslint-disable */ +/* tslint:disable */ +/* + * --------------------------------------------------------------- + * ## THIS FILE WAS GENERATED VIA SWAGGER-TYPESCRIPT-API ## + * ## ## + * ## AUTHOR: acacode ## + * ## SOURCE: https://github.com/acacode/swagger-typescript-api ## + * --------------------------------------------------------------- + */ + +export interface ProtobufAny { + "@type"?: string; +} + +export interface RpcStatus { + /** @format int32 */ + code?: number; + message?: string; + details?: ProtobufAny[]; +} + +/** +* ParamChange defines an individual parameter change, for use in +ParameterChangeProposal. +*/ +export interface V1Beta1ParamChange { + subspace?: string; + key?: string; + value?: string; +} + +/** + * QueryParamsResponse is response type for the Query/Params RPC method. + */ +export interface V1Beta1QueryParamsResponse { + /** param defines the queried parameter. 
*/ + param?: V1Beta1ParamChange; +} + +/** +* QuerySubspacesResponse defines the response types for querying for all +registered subspaces and all keys for a subspace. + +Since: cosmos-sdk 0.46 +*/ +export interface V1Beta1QuerySubspacesResponse { + subspaces?: V1Beta1Subspace[]; +} + +/** +* Subspace defines a parameter subspace name and all the keys that exist for +the subspace. + +Since: cosmos-sdk 0.46 +*/ +export interface V1Beta1Subspace { + subspace?: string; + keys?: string[]; +} + +import axios, { AxiosInstance, AxiosRequestConfig, AxiosResponse, ResponseType } from "axios"; + +export type QueryParamsType = Record; + +export interface FullRequestParams extends Omit { + /** set parameter to `true` for call `securityWorker` for this request */ + secure?: boolean; + /** request path */ + path: string; + /** content type of request body */ + type?: ContentType; + /** query params */ + query?: QueryParamsType; + /** format of response (i.e. response.json() -> format: "json") */ + format?: ResponseType; + /** request body */ + body?: unknown; +} + +export type RequestParams = Omit; + +export interface ApiConfig extends Omit { + securityWorker?: ( + securityData: SecurityDataType | null, + ) => Promise | AxiosRequestConfig | void; + secure?: boolean; + format?: ResponseType; +} + +export enum ContentType { + Json = "application/json", + FormData = "multipart/form-data", + UrlEncoded = "application/x-www-form-urlencoded", +} + +export class HttpClient { + public instance: AxiosInstance; + private securityData: SecurityDataType | null = null; + private securityWorker?: ApiConfig["securityWorker"]; + private secure?: boolean; + private format?: ResponseType; + + constructor({ securityWorker, secure, format, ...axiosConfig }: ApiConfig = {}) { + this.instance = axios.create({ ...axiosConfig, baseURL: axiosConfig.baseURL || "" }); + this.secure = secure; + this.format = format; + this.securityWorker = securityWorker; + } + + public setSecurityData = (data: 
SecurityDataType | null) => { + this.securityData = data; + }; + + private mergeRequestParams(params1: AxiosRequestConfig, params2?: AxiosRequestConfig): AxiosRequestConfig { + return { + ...this.instance.defaults, + ...params1, + ...(params2 || {}), + headers: { + ...(this.instance.defaults.headers || {}), + ...(params1.headers || {}), + ...((params2 && params2.headers) || {}), + }, + }; + } + + private createFormData(input: Record): FormData { + return Object.keys(input || {}).reduce((formData, key) => { + const property = input[key]; + formData.append( + key, + property instanceof Blob + ? property + : typeof property === "object" && property !== null + ? JSON.stringify(property) + : `${property}`, + ); + return formData; + }, new FormData()); + } + + public request = async ({ + secure, + path, + type, + query, + format, + body, + ...params + }: FullRequestParams): Promise> => { + const secureParams = + ((typeof secure === "boolean" ? secure : this.secure) && + this.securityWorker && + (await this.securityWorker(this.securityData))) || + {}; + const requestParams = this.mergeRequestParams(params, secureParams); + const responseFormat = (format && this.format) || void 0; + + if (type === ContentType.FormData && body && body !== null && typeof body === "object") { + requestParams.headers.common = { Accept: "*/*" }; + requestParams.headers.post = {}; + requestParams.headers.put = {}; + + body = this.createFormData(body as Record); + } + + return this.instance.request({ + ...requestParams, + headers: { + ...(type && type !== ContentType.FormData ? 
{ "Content-Type": type } : {}), + ...(requestParams.headers || {}), + }, + params: query, + responseType: responseFormat, + data: body, + url: path, + }); + }; +} + +/** + * @title cosmos/params/v1beta1/params.proto + * @version version not set + */ +export class Api extends HttpClient { + /** + * No description + * + * @tags Query + * @name QueryParams + * @summary Params queries a specific parameter of a module, given its subspace and +key. + * @request GET:/cosmos/params/v1beta1/params + */ + queryParams = (query?: { subspace?: string; key?: string }, params: RequestParams = {}) => + this.request({ + path: `/cosmos/params/v1beta1/params`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * @description Since: cosmos-sdk 0.46 + * + * @tags Query + * @name QuerySubspaces + * @summary Subspaces queries for all registered subspaces and all keys for a subspace. + * @request GET:/cosmos/params/v1beta1/subspaces + */ + querySubspaces = (params: RequestParams = {}) => + this.request({ + path: `/cosmos/params/v1beta1/subspaces`, + method: "GET", + format: "json", + ...params, + }); +} diff --git a/ts-client/cosmos.params.v1beta1/types.ts b/ts-client/cosmos.params.v1beta1/types.ts new file mode 100755 index 000000000..2ac7ca983 --- /dev/null +++ b/ts-client/cosmos.params.v1beta1/types.ts @@ -0,0 +1,11 @@ +import { ParameterChangeProposal } from "./types/cosmos/params/v1beta1/params" +import { ParamChange } from "./types/cosmos/params/v1beta1/params" +import { Subspace } from "./types/cosmos/params/v1beta1/query" + + +export { + ParameterChangeProposal, + ParamChange, + Subspace, + + } \ No newline at end of file diff --git a/ts-client/cosmos.params.v1beta1/types/amino/amino.ts b/ts-client/cosmos.params.v1beta1/types/amino/amino.ts new file mode 100644 index 000000000..4b67dcfe4 --- /dev/null +++ b/ts-client/cosmos.params.v1beta1/types/amino/amino.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "amino"; diff --git 
a/ts-client/cosmos.params.v1beta1/types/cosmos/params/v1beta1/params.ts b/ts-client/cosmos.params.v1beta1/types/cosmos/params/v1beta1/params.ts new file mode 100644 index 000000000..ebcaa371e --- /dev/null +++ b/ts-client/cosmos.params.v1beta1/types/cosmos/params/v1beta1/params.ts @@ -0,0 +1,174 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos.params.v1beta1"; + +/** ParameterChangeProposal defines a proposal to change one or more parameters. */ +export interface ParameterChangeProposal { + title: string; + description: string; + changes: ParamChange[]; +} + +/** + * ParamChange defines an individual parameter change, for use in + * ParameterChangeProposal. + */ +export interface ParamChange { + subspace: string; + key: string; + value: string; +} + +function createBaseParameterChangeProposal(): ParameterChangeProposal { + return { title: "", description: "", changes: [] }; +} + +export const ParameterChangeProposal = { + encode(message: ParameterChangeProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + for (const v of message.changes) { + ParamChange.encode(v!, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ParameterChangeProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseParameterChangeProposal(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + case 3: + message.changes.push(ParamChange.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ParameterChangeProposal { + return { + title: isSet(object.title) ? String(object.title) : "", + description: isSet(object.description) ? String(object.description) : "", + changes: Array.isArray(object?.changes) ? object.changes.map((e: any) => ParamChange.fromJSON(e)) : [], + }; + }, + + toJSON(message: ParameterChangeProposal): unknown { + const obj: any = {}; + message.title !== undefined && (obj.title = message.title); + message.description !== undefined && (obj.description = message.description); + if (message.changes) { + obj.changes = message.changes.map((e) => e ? ParamChange.toJSON(e) : undefined); + } else { + obj.changes = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ParameterChangeProposal { + const message = createBaseParameterChangeProposal(); + message.title = object.title ?? ""; + message.description = object.description ?? 
""; + message.changes = object.changes?.map((e) => ParamChange.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseParamChange(): ParamChange { + return { subspace: "", key: "", value: "" }; +} + +export const ParamChange = { + encode(message: ParamChange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subspace !== "") { + writer.uint32(10).string(message.subspace); + } + if (message.key !== "") { + writer.uint32(18).string(message.key); + } + if (message.value !== "") { + writer.uint32(26).string(message.value); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ParamChange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseParamChange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.subspace = reader.string(); + break; + case 2: + message.key = reader.string(); + break; + case 3: + message.value = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ParamChange { + return { + subspace: isSet(object.subspace) ? String(object.subspace) : "", + key: isSet(object.key) ? String(object.key) : "", + value: isSet(object.value) ? String(object.value) : "", + }; + }, + + toJSON(message: ParamChange): unknown { + const obj: any = {}; + message.subspace !== undefined && (obj.subspace = message.subspace); + message.key !== undefined && (obj.key = message.key); + message.value !== undefined && (obj.value = message.value); + return obj; + }, + + fromPartial, I>>(object: I): ParamChange { + const message = createBaseParamChange(); + message.subspace = object.subspace ?? ""; + message.key = object.key ?? ""; + message.value = object.value ?? 
""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.params.v1beta1/types/cosmos/params/v1beta1/query.ts b/ts-client/cosmos.params.v1beta1/types/cosmos/params/v1beta1/query.ts new file mode 100644 index 000000000..5553a8406 --- /dev/null +++ b/ts-client/cosmos.params.v1beta1/types/cosmos/params/v1beta1/query.ts @@ -0,0 +1,364 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { ParamChange } from "./params"; + +export const protobufPackage = "cosmos.params.v1beta1"; + +/** QueryParamsRequest is request type for the Query/Params RPC method. */ +export interface QueryParamsRequest { + /** subspace defines the module to query the parameter for. */ + subspace: string; + /** key defines the key of the parameter in the subspace. */ + key: string; +} + +/** QueryParamsResponse is response type for the Query/Params RPC method. */ +export interface QueryParamsResponse { + /** param defines the queried parameter. */ + param: ParamChange | undefined; +} + +/** + * QuerySubspacesRequest defines a request type for querying for all registered + * subspaces and all keys for a subspace. + * + * Since: cosmos-sdk 0.46 + */ +export interface QuerySubspacesRequest { +} + +/** + * QuerySubspacesResponse defines the response types for querying for all + * registered subspaces and all keys for a subspace. 
+ * + * Since: cosmos-sdk 0.46 + */ +export interface QuerySubspacesResponse { + subspaces: Subspace[]; +} + +/** + * Subspace defines a parameter subspace name and all the keys that exist for + * the subspace. + * + * Since: cosmos-sdk 0.46 + */ +export interface Subspace { + subspace: string; + keys: string[]; +} + +function createBaseQueryParamsRequest(): QueryParamsRequest { + return { subspace: "", key: "" }; +} + +export const QueryParamsRequest = { + encode(message: QueryParamsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subspace !== "") { + writer.uint32(10).string(message.subspace); + } + if (message.key !== "") { + writer.uint32(18).string(message.key); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.subspace = reader.string(); + break; + case 2: + message.key = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryParamsRequest { + return { + subspace: isSet(object.subspace) ? String(object.subspace) : "", + key: isSet(object.key) ? String(object.key) : "", + }; + }, + + toJSON(message: QueryParamsRequest): unknown { + const obj: any = {}; + message.subspace !== undefined && (obj.subspace = message.subspace); + message.key !== undefined && (obj.key = message.key); + return obj; + }, + + fromPartial, I>>(object: I): QueryParamsRequest { + const message = createBaseQueryParamsRequest(); + message.subspace = object.subspace ?? ""; + message.key = object.key ?? 
""; + return message; + }, +}; + +function createBaseQueryParamsResponse(): QueryParamsResponse { + return { param: undefined }; +} + +export const QueryParamsResponse = { + encode(message: QueryParamsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.param !== undefined) { + ParamChange.encode(message.param, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.param = ParamChange.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryParamsResponse { + return { param: isSet(object.param) ? ParamChange.fromJSON(object.param) : undefined }; + }, + + toJSON(message: QueryParamsResponse): unknown { + const obj: any = {}; + message.param !== undefined && (obj.param = message.param ? ParamChange.toJSON(message.param) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryParamsResponse { + const message = createBaseQueryParamsResponse(); + message.param = (object.param !== undefined && object.param !== null) + ? ParamChange.fromPartial(object.param) + : undefined; + return message; + }, +}; + +function createBaseQuerySubspacesRequest(): QuerySubspacesRequest { + return {}; +} + +export const QuerySubspacesRequest = { + encode(_: QuerySubspacesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySubspacesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQuerySubspacesRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): QuerySubspacesRequest { + return {}; + }, + + toJSON(_: QuerySubspacesRequest): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): QuerySubspacesRequest { + const message = createBaseQuerySubspacesRequest(); + return message; + }, +}; + +function createBaseQuerySubspacesResponse(): QuerySubspacesResponse { + return { subspaces: [] }; +} + +export const QuerySubspacesResponse = { + encode(message: QuerySubspacesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.subspaces) { + Subspace.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QuerySubspacesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQuerySubspacesResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.subspaces.push(Subspace.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QuerySubspacesResponse { + return { + subspaces: Array.isArray(object?.subspaces) ? object.subspaces.map((e: any) => Subspace.fromJSON(e)) : [], + }; + }, + + toJSON(message: QuerySubspacesResponse): unknown { + const obj: any = {}; + if (message.subspaces) { + obj.subspaces = message.subspaces.map((e) => e ? 
Subspace.toJSON(e) : undefined); + } else { + obj.subspaces = []; + } + return obj; + }, + + fromPartial, I>>(object: I): QuerySubspacesResponse { + const message = createBaseQuerySubspacesResponse(); + message.subspaces = object.subspaces?.map((e) => Subspace.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSubspace(): Subspace { + return { subspace: "", keys: [] }; +} + +export const Subspace = { + encode(message: Subspace, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subspace !== "") { + writer.uint32(10).string(message.subspace); + } + for (const v of message.keys) { + writer.uint32(18).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Subspace { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSubspace(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.subspace = reader.string(); + break; + case 2: + message.keys.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Subspace { + return { + subspace: isSet(object.subspace) ? String(object.subspace) : "", + keys: Array.isArray(object?.keys) ? object.keys.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: Subspace): unknown { + const obj: any = {}; + message.subspace !== undefined && (obj.subspace = message.subspace); + if (message.keys) { + obj.keys = message.keys.map((e) => e); + } else { + obj.keys = []; + } + return obj; + }, + + fromPartial, I>>(object: I): Subspace { + const message = createBaseSubspace(); + message.subspace = object.subspace ?? ""; + message.keys = object.keys?.map((e) => e) || []; + return message; + }, +}; + +/** Query defines the gRPC querier service. 
*/ +export interface Query { + /** + * Params queries a specific parameter of a module, given its subspace and + * key. + */ + Params(request: QueryParamsRequest): Promise; + /** + * Subspaces queries for all registered subspaces and all keys for a subspace. + * + * Since: cosmos-sdk 0.46 + */ + Subspaces(request: QuerySubspacesRequest): Promise; +} + +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.Params = this.Params.bind(this); + this.Subspaces = this.Subspaces.bind(this); + } + Params(request: QueryParamsRequest): Promise { + const data = QueryParamsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.params.v1beta1.Query", "Params", data); + return promise.then((data) => QueryParamsResponse.decode(new _m0.Reader(data))); + } + + Subspaces(request: QuerySubspacesRequest): Promise { + const data = QuerySubspacesRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.params.v1beta1.Query", "Subspaces", data); + return promise.then((data) => QuerySubspacesResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.params.v1beta1/types/cosmos_proto/cosmos.ts b/ts-client/cosmos.params.v1beta1/types/cosmos_proto/cosmos.ts new file mode 100644 index 000000000..79c8d3486 --- /dev/null +++ b/ts-client/cosmos.params.v1beta1/types/cosmos_proto/cosmos.ts @@ -0,0 +1,247 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos_proto"; + +export enum ScalarType { + SCALAR_TYPE_UNSPECIFIED = 0, + SCALAR_TYPE_STRING = 1, + SCALAR_TYPE_BYTES = 2, + UNRECOGNIZED = -1, +} + +export function scalarTypeFromJSON(object: any): ScalarType { + switch (object) { + case 0: + case "SCALAR_TYPE_UNSPECIFIED": + return ScalarType.SCALAR_TYPE_UNSPECIFIED; + case 1: + case "SCALAR_TYPE_STRING": + return ScalarType.SCALAR_TYPE_STRING; + case 2: + case "SCALAR_TYPE_BYTES": + return ScalarType.SCALAR_TYPE_BYTES; + case -1: + case "UNRECOGNIZED": + default: + return ScalarType.UNRECOGNIZED; + } +} + +export function scalarTypeToJSON(object: ScalarType): string { + switch (object) { + case ScalarType.SCALAR_TYPE_UNSPECIFIED: + return "SCALAR_TYPE_UNSPECIFIED"; + case ScalarType.SCALAR_TYPE_STRING: + return "SCALAR_TYPE_STRING"; + case ScalarType.SCALAR_TYPE_BYTES: + return "SCALAR_TYPE_BYTES"; + case ScalarType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * InterfaceDescriptor describes an interface type to be used with + * accepts_interface and implements_interface and declared by declare_interface. + */ +export interface InterfaceDescriptor { + /** + * name is the name of the interface. It should be a short-name (without + * a period) such that the fully qualified name of the interface will be + * package.name, ex. for the package a.b and interface named C, the + * fully-qualified name will be a.b.C. 
+ */ + name: string; + /** + * description is a human-readable description of the interface and its + * purpose. + */ + description: string; +} + +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. + * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. + * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ +export interface ScalarDescriptor { + /** + * name is the name of the scalar. It should be a short-name (without + * a period) such that the fully qualified name of the scalar will be + * package.name, ex. for the package a.b and scalar named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the scalar and its + * encoding format. For instance a big integer or decimal scalar should + * specify precisely the expected encoding format. + */ + description: string; + /** + * field_type is the type of field with which this scalar can be used. + * Scalars can be used with one and only one type of field so that + * encoding standards and simple and clear. Currently only string and + * bytes fields are supported for scalars. + */ + fieldType: ScalarType[]; +} + +function createBaseInterfaceDescriptor(): InterfaceDescriptor { + return { name: "", description: "" }; +} + +export const InterfaceDescriptor = { + encode(message: InterfaceDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceDescriptor { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseInterfaceDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): InterfaceDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + }; + }, + + toJSON(message: InterfaceDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + return obj; + }, + + fromPartial, I>>(object: I): InterfaceDescriptor { + const message = createBaseInterfaceDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? ""; + return message; + }, +}; + +function createBaseScalarDescriptor(): ScalarDescriptor { + return { name: "", description: "", fieldType: [] }; +} + +export const ScalarDescriptor = { + encode(message: ScalarDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + writer.uint32(26).fork(); + for (const v of message.fieldType) { + writer.int32(v); + } + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ScalarDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseScalarDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.fieldType.push(reader.int32() as any); + } + } else { + message.fieldType.push(reader.int32() as any); + } + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ScalarDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + fieldType: Array.isArray(object?.fieldType) ? object.fieldType.map((e: any) => scalarTypeFromJSON(e)) : [], + }; + }, + + toJSON(message: ScalarDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + if (message.fieldType) { + obj.fieldType = message.fieldType.map((e) => scalarTypeToJSON(e)); + } else { + obj.fieldType = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ScalarDescriptor { + const message = createBaseScalarDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? ""; + message.fieldType = object.fieldType?.map((e) => e) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.params.v1beta1/types/gogoproto/gogo.ts b/ts-client/cosmos.params.v1beta1/types/gogoproto/gogo.ts new file mode 100644 index 000000000..3f41a047a --- /dev/null +++ b/ts-client/cosmos.params.v1beta1/types/gogoproto/gogo.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "gogoproto"; diff --git a/ts-client/cosmos.params.v1beta1/types/google/api/annotations.ts b/ts-client/cosmos.params.v1beta1/types/google/api/annotations.ts new file mode 100644 index 000000000..aace4787f --- /dev/null +++ b/ts-client/cosmos.params.v1beta1/types/google/api/annotations.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "google.api"; diff --git a/ts-client/cosmos.params.v1beta1/types/google/api/http.ts b/ts-client/cosmos.params.v1beta1/types/google/api/http.ts new file mode 100644 index 000000000..17e770d15 --- /dev/null +++ b/ts-client/cosmos.params.v1beta1/types/google/api/http.ts @@ -0,0 +1,589 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.api"; + +/** + * Defines the HTTP configuration for an API service. It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface Http { + /** + * A list of HTTP configuration rules that apply to individual API methods. + * + * **NOTE:** All service configuration rules follow "last one wins" order. + */ + rules: HttpRule[]; + /** + * When set to true, URL path parmeters will be fully URI-decoded except in + * cases of single segment matches in reserved expansion, where "%2F" will be + * left encoded. + * + * The default behavior is to not decode RFC 6570 reserved characters in multi + * segment matches. 
+ */ + fullyDecodeReservedExpansion: boolean; +} + +/** + * `HttpRule` defines the mapping of an RPC method to one or more HTTP + * REST API methods. The mapping specifies how different portions of the RPC + * request message are mapped to URL path, URL query parameters, and + * HTTP request body. The mapping is typically specified as an + * `google.api.http` annotation on the RPC method, + * see "google/api/annotations.proto" for details. + * + * The mapping consists of a field specifying the path template and + * method kind. The path template can refer to fields in the request + * message, as in the example below which describes a REST GET + * operation on a resource collection of messages: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}/{sub.subfield}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * SubMessage sub = 2; // `sub.subfield` is url-mapped + * } + * message Message { + * string text = 1; // content of the resource + * } + * + * The same http annotation can alternatively be expressed inside the + * `GRPC API Configuration` YAML file. + * + * http: + * rules: + * - selector: .Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * This definition enables an automatic, bidrectional mapping of HTTP + * JSON to RPC. Example: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456/foo` | `GetMessage(message_id: "123456" sub: SubMessage(subfield: "foo"))` + * + * In general, not only fields but also field paths can be referenced + * from a path pattern. Fields mapped to the path pattern cannot be + * repeated and must have a primitive (non-message) type. + * + * Any fields in the request message which are not bound by the path + * pattern automatically become (optional) HTTP query + * parameters. 
Assume the following definition of the request message: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * int64 revision = 2; // becomes a parameter + * SubMessage sub = 3; // `sub.subfield` becomes a parameter + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456?revision=2&sub.subfield=foo` | `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: "foo"))` + * + * Note that fields which are mapped to HTTP parameters must have a + * primitive type or a repeated primitive type. Message types are not + * allowed. In the case of a repeated type, the parameter can be + * repeated in the URL, as in `...?param=A¶m=B`. + * + * For HTTP method kinds which allow a request body, the `body` field + * specifies the mapping. Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. 
This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * The following HTTP JSON to RPC mapping is enabled: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice of + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC + * mappings: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` + * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: "123456")` + * + * # Rules for HTTP mapping + * + * The rules for mapping HTTP path, query parameters, and body fields + * to the request message are as follows: + * + * 1. The `body` field specifies either `*` or a field path, or is + * omitted. If omitted, it indicates there is no HTTP request body. + * 2. 
Leaf fields (recursive expansion of nested messages in the + * request) can be classified into three types: + * (a) Matched in the URL template. + * (b) Covered by body (if body is `*`, everything except (a) fields; + * else everything under the body field) + * (c) All other fields. + * 3. URL query parameters found in the HTTP request are mapped to (c) fields. + * 4. Any body sent with an HTTP request can contain only (b) fields. + * + * The syntax of the path template is as follows: + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single path segment. The syntax `**` matches zero + * or more path segments, which must be the last part of the path except the + * `Verb`. The syntax `LITERAL` matches literal text in the path. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path, all characters + * except `[-_.~0-9a-zA-Z]` are percent-encoded. Such variables show up in the + * Discovery Document as `{var}`. + * + * If a variable contains one or more path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path, all + * characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. Such variables + * show up in the Discovery Document as `{+var}`. 
+ * + * NOTE: While the single segment variable matches the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 + * Simple String Expansion, the multi segment variable **does not** match + * RFC 6570 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. + * + * NOTE: the field paths in variables and in the `body` must not refer to + * repeated fields or map fields. + */ +export interface HttpRule { + /** + * Selects methods to which this rule applies. + * + * Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + */ + selector: string; + /** Used for listing and getting information about resources. */ + get: + | string + | undefined; + /** Used for updating a resource. */ + put: + | string + | undefined; + /** Used for creating a resource. */ + post: + | string + | undefined; + /** Used for deleting a resource. */ + delete: + | string + | undefined; + /** Used for updating a resource. */ + patch: + | string + | undefined; + /** + * The custom pattern is used for specifying an HTTP method that is not + * included in the `pattern` field, such as HEAD, or "*" to leave the + * HTTP method unspecified for this rule. The wild-card rule is useful + * for services that provide content to Web (HTML) clients. + */ + custom: + | CustomHttpPattern + | undefined; + /** + * The name of the request field whose value is mapped to the HTTP body, or + * `*` for mapping all fields not captured by the path pattern to the HTTP + * body. NOTE: the referred field must not be a repeated field and must be + * present at the top-level of request message type. + */ + body: string; + /** + * Optional. The name of the response field whose value is mapped to the HTTP + * body of response. Other response fields are ignored. When + * not set, the response message will be used as HTTP body of response. 
+ */ + responseBody: string; + /** + * Additional HTTP bindings for the selector. Nested bindings must + * not contain an `additional_bindings` field themselves (that is, + * the nesting may only be one level deep). + */ + additionalBindings: HttpRule[]; +} + +/** A custom pattern is used for defining custom HTTP verb. */ +export interface CustomHttpPattern { + /** The name of this custom HTTP verb. */ + kind: string; + /** The path matched by this custom verb. */ + path: string; +} + +function createBaseHttp(): Http { + return { rules: [], fullyDecodeReservedExpansion: false }; +} + +export const Http = { + encode(message: Http, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rules) { + HttpRule.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.fullyDecodeReservedExpansion === true) { + writer.uint32(16).bool(message.fullyDecodeReservedExpansion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Http { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rules.push(HttpRule.decode(reader, reader.uint32())); + break; + case 2: + message.fullyDecodeReservedExpansion = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Http { + return { + rules: Array.isArray(object?.rules) ? object.rules.map((e: any) => HttpRule.fromJSON(e)) : [], + fullyDecodeReservedExpansion: isSet(object.fullyDecodeReservedExpansion) + ? Boolean(object.fullyDecodeReservedExpansion) + : false, + }; + }, + + toJSON(message: Http): unknown { + const obj: any = {}; + if (message.rules) { + obj.rules = message.rules.map((e) => e ? 
HttpRule.toJSON(e) : undefined); + } else { + obj.rules = []; + } + message.fullyDecodeReservedExpansion !== undefined + && (obj.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion); + return obj; + }, + + fromPartial, I>>(object: I): Http { + const message = createBaseHttp(); + message.rules = object.rules?.map((e) => HttpRule.fromPartial(e)) || []; + message.fullyDecodeReservedExpansion = object.fullyDecodeReservedExpansion ?? false; + return message; + }, +}; + +function createBaseHttpRule(): HttpRule { + return { + selector: "", + get: undefined, + put: undefined, + post: undefined, + delete: undefined, + patch: undefined, + custom: undefined, + body: "", + responseBody: "", + additionalBindings: [], + }; +} + +export const HttpRule = { + encode(message: HttpRule, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.selector !== "") { + writer.uint32(10).string(message.selector); + } + if (message.get !== undefined) { + writer.uint32(18).string(message.get); + } + if (message.put !== undefined) { + writer.uint32(26).string(message.put); + } + if (message.post !== undefined) { + writer.uint32(34).string(message.post); + } + if (message.delete !== undefined) { + writer.uint32(42).string(message.delete); + } + if (message.patch !== undefined) { + writer.uint32(50).string(message.patch); + } + if (message.custom !== undefined) { + CustomHttpPattern.encode(message.custom, writer.uint32(66).fork()).ldelim(); + } + if (message.body !== "") { + writer.uint32(58).string(message.body); + } + if (message.responseBody !== "") { + writer.uint32(98).string(message.responseBody); + } + for (const v of message.additionalBindings) { + HttpRule.encode(v!, writer.uint32(90).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): HttpRule { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseHttpRule(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.selector = reader.string(); + break; + case 2: + message.get = reader.string(); + break; + case 3: + message.put = reader.string(); + break; + case 4: + message.post = reader.string(); + break; + case 5: + message.delete = reader.string(); + break; + case 6: + message.patch = reader.string(); + break; + case 8: + message.custom = CustomHttpPattern.decode(reader, reader.uint32()); + break; + case 7: + message.body = reader.string(); + break; + case 12: + message.responseBody = reader.string(); + break; + case 11: + message.additionalBindings.push(HttpRule.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): HttpRule { + return { + selector: isSet(object.selector) ? String(object.selector) : "", + get: isSet(object.get) ? String(object.get) : undefined, + put: isSet(object.put) ? String(object.put) : undefined, + post: isSet(object.post) ? String(object.post) : undefined, + delete: isSet(object.delete) ? String(object.delete) : undefined, + patch: isSet(object.patch) ? String(object.patch) : undefined, + custom: isSet(object.custom) ? CustomHttpPattern.fromJSON(object.custom) : undefined, + body: isSet(object.body) ? String(object.body) : "", + responseBody: isSet(object.responseBody) ? String(object.responseBody) : "", + additionalBindings: Array.isArray(object?.additionalBindings) + ? 
object.additionalBindings.map((e: any) => HttpRule.fromJSON(e)) + : [], + }; + }, + + toJSON(message: HttpRule): unknown { + const obj: any = {}; + message.selector !== undefined && (obj.selector = message.selector); + message.get !== undefined && (obj.get = message.get); + message.put !== undefined && (obj.put = message.put); + message.post !== undefined && (obj.post = message.post); + message.delete !== undefined && (obj.delete = message.delete); + message.patch !== undefined && (obj.patch = message.patch); + message.custom !== undefined + && (obj.custom = message.custom ? CustomHttpPattern.toJSON(message.custom) : undefined); + message.body !== undefined && (obj.body = message.body); + message.responseBody !== undefined && (obj.responseBody = message.responseBody); + if (message.additionalBindings) { + obj.additionalBindings = message.additionalBindings.map((e) => e ? HttpRule.toJSON(e) : undefined); + } else { + obj.additionalBindings = []; + } + return obj; + }, + + fromPartial, I>>(object: I): HttpRule { + const message = createBaseHttpRule(); + message.selector = object.selector ?? ""; + message.get = object.get ?? undefined; + message.put = object.put ?? undefined; + message.post = object.post ?? undefined; + message.delete = object.delete ?? undefined; + message.patch = object.patch ?? undefined; + message.custom = (object.custom !== undefined && object.custom !== null) + ? CustomHttpPattern.fromPartial(object.custom) + : undefined; + message.body = object.body ?? ""; + message.responseBody = object.responseBody ?? 
""; + message.additionalBindings = object.additionalBindings?.map((e) => HttpRule.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseCustomHttpPattern(): CustomHttpPattern { + return { kind: "", path: "" }; +} + +export const CustomHttpPattern = { + encode(message: CustomHttpPattern, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.kind !== "") { + writer.uint32(10).string(message.kind); + } + if (message.path !== "") { + writer.uint32(18).string(message.path); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CustomHttpPattern { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCustomHttpPattern(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.kind = reader.string(); + break; + case 2: + message.path = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): CustomHttpPattern { + return { kind: isSet(object.kind) ? String(object.kind) : "", path: isSet(object.path) ? String(object.path) : "" }; + }, + + toJSON(message: CustomHttpPattern): unknown { + const obj: any = {}; + message.kind !== undefined && (obj.kind = message.kind); + message.path !== undefined && (obj.path = message.path); + return obj; + }, + + fromPartial, I>>(object: I): CustomHttpPattern { + const message = createBaseCustomHttpPattern(); + message.kind = object.kind ?? ""; + message.path = object.path ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? 
keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.params.v1beta1/types/google/protobuf/descriptor.ts b/ts-client/cosmos.params.v1beta1/types/google/protobuf/descriptor.ts new file mode 100644 index 000000000..8fb7bb24c --- /dev/null +++ b/ts-client/cosmos.params.v1beta1/types/google/protobuf/descriptor.ts @@ -0,0 +1,3753 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSet { + file: FileDescriptorProto[]; +} + +/** Describes a complete .proto file. */ +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name: string; + /** e.g. "foo", "foo.bar", etc. */ + package: string; + /** Names of files imported by this file. */ + dependency: string[]; + /** Indexes of the public imported files in the dependency list above. */ + publicDependency: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weakDependency: number[]; + /** All top-level definitions in this file. */ + messageType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + service: ServiceDescriptorProto[]; + extension: FieldDescriptorProto[]; + options: + | FileOptions + | undefined; + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. + */ + sourceCodeInfo: + | SourceCodeInfo + | undefined; + /** + * The syntax of the proto file. 
+ * The supported values are "proto2" and "proto3". + */ + syntax: string; +} + +/** Describes a message type. */ +export interface DescriptorProto { + name: string; + field: FieldDescriptorProto[]; + extension: FieldDescriptorProto[]; + nestedType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + extensionRange: DescriptorProto_ExtensionRange[]; + oneofDecl: OneofDescriptorProto[]; + options: MessageOptions | undefined; + reservedRange: DescriptorProto_ReservedRange[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reservedName: string[]; +} + +export interface DescriptorProto_ExtensionRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; + options: ExtensionRangeOptions | undefined; +} + +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; +} + +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Describes a field within a message. */ +export interface FieldDescriptorProto { + name: string; + number: number; + label: FieldDescriptorProto_Label; + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type: FieldDescriptorProto_Type; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). 
+ */ + typeName: string; + /** + * For extensions, this is the name of the type being extended. It is + * resolved in the same manner as type_name. + */ + extendee: string; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + * TODO(kenton): Base-64 encode? + */ + defaultValue: string; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneofIndex: number; + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + jsonName: string; + options: + | FieldOptions + | undefined; + /** + * If true, this is a proto3 "optional". When a proto3 field is optional, it + * tracks presence regardless of field type. + * + * When proto3_optional is true, this field must be belong to a oneof to + * signal to old proto3 clients that presence is tracked for this field. This + * oneof is known as a "synthetic" oneof, and this field must be its sole + * member (each proto3 optional field gets its own synthetic oneof). Synthetic + * oneofs exist in the descriptor only, and do not generate any API. Synthetic + * oneofs must be ordered after all "real" oneofs. + * + * For message fields, proto3_optional doesn't create any semantic change, + * since non-repeated message fields always track presence. However it still + * indicates the semantic detail of whether the user wrote "optional" or not. + * This can be useful for round-tripping the .proto file. For consistency we + * give message fields a synthetic oneof also, even though it is not required + * to track presence. 
This is especially important because the parser can't + * tell if a field is a message or an enum, so it must always create a + * synthetic oneof. + * + * Proto2 optional fields do not set this flag, because they already indicate + * optional with `LABEL_OPTIONAL`. + */ + proto3Optional: boolean; +} + +export enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + TYPE_GROUP = 10, + /** TYPE_MESSAGE - Length-delimited aggregate. */ + TYPE_MESSAGE = 11, + /** TYPE_BYTES - New in version 2. */ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + /** TYPE_SINT64 - Uses ZigZag encoding. 
*/ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + return FieldDescriptorProto_Type.TYPE_SINT64; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Type.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case 
FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + case FieldDescriptorProto_Type.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Label.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string { + switch (object) { + case 
FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + case FieldDescriptorProto_Label.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** Describes a oneof. */ +export interface OneofDescriptorProto { + name: string; + options: OneofOptions | undefined; +} + +/** Describes an enum type. */ +export interface EnumDescriptorProto { + name: string; + value: EnumValueDescriptorProto[]; + options: + | EnumOptions + | undefined; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reservedRange: EnumDescriptorProto_EnumReservedRange[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reservedName: string[]; +} + +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start: number; + /** Inclusive. */ + end: number; +} + +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProto { + name: string; + number: number; + options: EnumValueOptions | undefined; +} + +/** Describes a service. */ +export interface ServiceDescriptorProto { + name: string; + method: MethodDescriptorProto[]; + options: ServiceOptions | undefined; +} + +/** Describes a method of a service. */ +export interface MethodDescriptorProto { + name: string; + /** + * Input and output type names. 
These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + inputType: string; + outputType: string; + options: + | MethodOptions + | undefined; + /** Identifies if client streams multiple client messages */ + clientStreaming: boolean; + /** Identifies if server streams multiple server messages */ + serverStreaming: boolean; +} + +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage: string; + /** + * Controls the name of the wrapper Java class generated for the .proto file. + * That class will always contain the .proto file's getDescriptor() method as + * well as any top-level extensions defined in the .proto file. + * If java_multiple_files is disabled, then all the other classes from the + * .proto file will be nested inside the single wrapper outer class. + */ + javaOuterClassname: string; + /** + * If enabled, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the wrapper class + * named by java_outer_classname. However, the wrapper class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + javaMultipleFiles: boolean; + /** + * This option does nothing. + * + * @deprecated + */ + javaGenerateEqualsAndHash: boolean; + /** + * If set true, then the Java2 code generator will generate code that + * throws an exception whenever an attempt is made to assign a non-UTF-8 + * byte sequence to a string field. + * Message reflection will do the same. + * However, an extension field still accepts non-UTF-8 byte sequences. 
+ * This option has no effect on when used with the lite runtime. + */ + javaStringCheckUtf8: boolean; + optimizeFor: FileOptions_OptimizeMode; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + goPackage: string; + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + ccGenericServices: boolean; + javaGenericServices: boolean; + pyGenericServices: boolean; + phpGenericServices: boolean; + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated: boolean; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + ccEnableArenas: boolean; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. + */ + objcClassPrefix: string; + /** Namespace for generated classes; defaults to the package. 
*/ + csharpNamespace: string; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swiftPrefix: string; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + phpClassPrefix: string; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. When this option is empty, the package name will be used for + * determining the namespace. + */ + phpNamespace: string; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + phpMetadataNamespace: string; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + rubyPackage: string; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Generated classes can be optimized for speed or code size. */ +export enum FileOptions_OptimizeMode { + /** SPEED - Generate complete code for parsing, serialization, */ + SPEED = 1, + /** CODE_SIZE - etc. */ + CODE_SIZE = 2, + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. 
*/ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1, +} + +export function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + case -1: + case "UNRECOGNIZED": + default: + return FileOptions_OptimizeMode.UNRECOGNIZED; + } +} + +export function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + case FileOptions_OptimizeMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + messageSetWireFormat: boolean; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". 
+ */ + noStandardDescriptorAccessor: boolean; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated: boolean; + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + mapEntry: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface FieldOptions { + /** + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is not yet implemented in the open source + * release -- sorry, we'll try to include it in a future version! + */ + ctype: FieldOptions_CType; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. 
+ */ + packed: boolean; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. + * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype: FieldOptions_JSType; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * Note that implementations may choose not to check required fields within + * a lazy sub-message. That is, calling IsInitialized() on the outer message + * may return true even if the inner message has missing required fields. 
+ * This is necessary because otherwise the inner message would have to be + * parsed in order to perform the check, defeating the purpose of lazy + * parsing. An implementation which chooses not to check required fields + * must be consistent about it. That is, for any particular sub-message, the + * implementation must either *always* check its required fields, or *never* + * check its required fields, regardless of whether or not the message has + * been parsed. + */ + lazy: boolean; + /** + * Is this field deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated: boolean; + /** For Google-internal migration only. Do not use. */ + weak: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export enum FieldOptions_CType { + /** STRING - Default mode. */ + STRING = 0, + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_CType.UNRECOGNIZED; + } +} + +export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + case FieldOptions_CType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. */ + JS_NORMAL = 0, + /** JS_STRING - Use JavaScript strings. 
*/ + JS_STRING = 1, + /** JS_NUMBER - Use JavaScript numbers. */ + JS_NUMBER = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_JSType.UNRECOGNIZED; + } +} + +export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + case FieldOptions_JSType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface OneofOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias: boolean; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumValueOptions { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. 
*/ + uninterpretedOption: UninterpretedOption[]; +} + +export interface ServiceOptions { + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated: boolean; + idempotencyLevel: MethodOptions_IdempotencyLevel; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
+ */ +export enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1, +} + +export function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + case -1: + case "UNRECOGNIZED": + default: + return MethodOptions_IdempotencyLevel.UNRECOGNIZED; + } +} + +export function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + case MethodOptions_IdempotencyLevel.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. 
+ */ + identifierValue: string; + positiveIntValue: number; + negativeIntValue: number; + doubleValue: number; + stringValue: Uint8Array; + aggregateValue: string; +} + +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePart { + namePart: string; + isExtension: boolean; +} + +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. 
This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[]; +} + +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition. For + * example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). 
+ */ + path: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. + * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to qux. + * // + * // Another line attached to qux. + * optional double qux = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to qux or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. * / + * /* Block comment attached to + * * grault. 
* / + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leadingComments: string; + trailingComments: string; + leadingDetachedComments: string[]; +} + +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. + */ + annotation: GeneratedCodeInfo_Annotation[]; +} + +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[]; + /** Identifies the filesystem path to the original source .proto. */ + sourceFile: string; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin: number; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified offset. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end: number; +} + +function createBaseFileDescriptorSet(): FileDescriptorSet { + return { file: [] }; +} + +export const FileDescriptorSet = { + encode(message: FileDescriptorSet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.file) { + FileDescriptorProto.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorSet { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.file.push(FileDescriptorProto.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorSet { + return { file: Array.isArray(object?.file) ? object.file.map((e: any) => FileDescriptorProto.fromJSON(e)) : [] }; + }, + + toJSON(message: FileDescriptorSet): unknown { + const obj: any = {}; + if (message.file) { + obj.file = message.file.map((e) => e ? FileDescriptorProto.toJSON(e) : undefined); + } else { + obj.file = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorSet { + const message = createBaseFileDescriptorSet(); + message.file = object.file?.map((e) => FileDescriptorProto.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFileDescriptorProto(): FileDescriptorProto { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + }; +} + +export const FileDescriptorProto = { + encode(message: FileDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.package !== "") { + writer.uint32(18).string(message.package); + } + for (const v of message.dependency) { + writer.uint32(26).string(v!); + } + writer.uint32(82).fork(); + for (const v of message.publicDependency) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(90).fork(); + for (const v of message.weakDependency) { + writer.int32(v); + } + writer.ldelim(); + for (const v of message.messageType) { + DescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of 
message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const v of message.service) { + ServiceDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(58).fork()).ldelim(); + } + if (message.options !== undefined) { + FileOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.sourceCodeInfo !== undefined) { + SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(74).fork()).ldelim(); + } + if (message.syntax !== "") { + writer.uint32(98).string(message.syntax); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFileDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.package = reader.string(); + break; + case 3: + message.dependency.push(reader.string()); + break; + case 10: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.publicDependency.push(reader.int32()); + } + } else { + message.publicDependency.push(reader.int32()); + } + break; + case 11: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.weakDependency.push(reader.int32()); + } + } else { + message.weakDependency.push(reader.int32()); + } + break; + case 4: + message.messageType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.service.push(ServiceDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + 
message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 8: + message.options = FileOptions.decode(reader, reader.uint32()); + break; + case 9: + message.sourceCodeInfo = SourceCodeInfo.decode(reader, reader.uint32()); + break; + case 12: + message.syntax = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + package: isSet(object.package) ? String(object.package) : "", + dependency: Array.isArray(object?.dependency) ? object.dependency.map((e: any) => String(e)) : [], + publicDependency: Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e: any) => Number(e)) + : [], + weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e: any) => Number(e)) : [], + messageType: Array.isArray(object?.messageType) + ? object.messageType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + service: Array.isArray(object?.service) ? object.service.map((e: any) => ServiceDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) ? SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, + syntax: isSet(object.syntax) ? 
String(object.syntax) : "", + }; + }, + + toJSON(message: FileDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.package !== undefined && (obj.package = message.package); + if (message.dependency) { + obj.dependency = message.dependency.map((e) => e); + } else { + obj.dependency = []; + } + if (message.publicDependency) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } else { + obj.publicDependency = []; + } + if (message.weakDependency) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } else { + obj.weakDependency = []; + } + if (message.messageType) { + obj.messageType = message.messageType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.messageType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.service) { + obj.service = message.service.map((e) => e ? ServiceDescriptorProto.toJSON(e) : undefined); + } else { + obj.service = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + message.options !== undefined && (obj.options = message.options ? FileOptions.toJSON(message.options) : undefined); + message.sourceCodeInfo !== undefined + && (obj.sourceCodeInfo = message.sourceCodeInfo ? SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined); + message.syntax !== undefined && (obj.syntax = message.syntax); + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorProto { + const message = createBaseFileDescriptorProto(); + message.name = object.name ?? ""; + message.package = object.package ?? 
""; + message.dependency = object.dependency?.map((e) => e) || []; + message.publicDependency = object.publicDependency?.map((e) => e) || []; + message.weakDependency = object.weakDependency?.map((e) => e) || []; + message.messageType = object.messageType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.service = object.service?.map((e) => ServiceDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? FileOptions.fromPartial(object.options) + : undefined; + message.sourceCodeInfo = (object.sourceCodeInfo !== undefined && object.sourceCodeInfo !== null) + ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) + : undefined; + message.syntax = object.syntax ?? ""; + return message; + }, +}; + +function createBaseDescriptorProto(): DescriptorProto { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + }; +} + +export const DescriptorProto = { + encode(message: DescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.field) { + FieldDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.nestedType) { + DescriptorProto.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.extensionRange) { + DescriptorProto_ExtensionRange.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const 
v of message.oneofDecl) { + OneofDescriptorProto.encode(v!, writer.uint32(66).fork()).ldelim(); + } + if (message.options !== undefined) { + MessageOptions.encode(message.options, writer.uint32(58).fork()).ldelim(); + } + for (const v of message.reservedRange) { + DescriptorProto_ReservedRange.encode(v!, writer.uint32(74).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(82).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.field.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.nestedType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 4: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.extensionRange.push(DescriptorProto_ExtensionRange.decode(reader, reader.uint32())); + break; + case 8: + message.oneofDecl.push(OneofDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + message.options = MessageOptions.decode(reader, reader.uint32()); + break; + case 9: + message.reservedRange.push(DescriptorProto_ReservedRange.decode(reader, reader.uint32())); + break; + case 10: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + field: Array.isArray(object?.field) ? 
object.field.map((e: any) => FieldDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: Array.isArray(object?.nestedType) + ? object.nestedType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + extensionRange: Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e: any) => DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e: any) => OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? MessageOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: DescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.field) { + obj.field = message.field.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.field = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + if (message.nestedType) { + obj.nestedType = message.nestedType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.nestedType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.extensionRange) { + obj.extensionRange = message.extensionRange.map((e) => e ? 
DescriptorProto_ExtensionRange.toJSON(e) : undefined); + } else { + obj.extensionRange = []; + } + if (message.oneofDecl) { + obj.oneofDecl = message.oneofDecl.map((e) => e ? OneofDescriptorProto.toJSON(e) : undefined); + } else { + obj.oneofDecl = []; + } + message.options !== undefined + && (obj.options = message.options ? MessageOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => e ? DescriptorProto_ReservedRange.toJSON(e) : undefined); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): DescriptorProto { + const message = createBaseDescriptorProto(); + message.name = object.name ?? ""; + message.field = object.field?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.nestedType = object.nestedType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.extensionRange = object.extensionRange?.map((e) => DescriptorProto_ExtensionRange.fromPartial(e)) || []; + message.oneofDecl = object.oneofDecl?.map((e) => OneofDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
MessageOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => DescriptorProto_ReservedRange.fromPartial(e)) || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseDescriptorProto_ExtensionRange(): DescriptorProto_ExtensionRange { + return { start: 0, end: 0, options: undefined }; +} + +export const DescriptorProto_ExtensionRange = { + encode(message: DescriptorProto_ExtensionRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + if (message.options !== undefined) { + ExtensionRangeOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ExtensionRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ExtensionRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + case 3: + message.options = ExtensionRangeOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ExtensionRange { + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + options: isSet(object.options) ? 
ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: DescriptorProto_ExtensionRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + message.options !== undefined + && (obj.options = message.options ? ExtensionRangeOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ExtensionRange { + const message = createBaseDescriptorProto_ExtensionRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? ExtensionRangeOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseDescriptorProto_ReservedRange(): DescriptorProto_ReservedRange { + return { start: 0, end: 0 }; +} + +export const DescriptorProto_ReservedRange = { + encode(message: DescriptorProto_ReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? 
Number(object.end) : 0 }; + }, + + toJSON(message: DescriptorProto_ReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ReservedRange { + const message = createBaseDescriptorProto_ReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + return message; + }, +}; + +function createBaseExtensionRangeOptions(): ExtensionRangeOptions { + return { uninterpretedOption: [] }; +} + +export const ExtensionRangeOptions = { + encode(message: ExtensionRangeOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ExtensionRangeOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ExtensionRangeOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} + +export const FieldDescriptorProto = { + encode(message: FieldDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(24).int32(message.number); + } + if (message.label !== 1) { + writer.uint32(32).int32(message.label); + } + if (message.type !== 1) { + writer.uint32(40).int32(message.type); + } + if (message.typeName !== "") { + writer.uint32(50).string(message.typeName); + } + if (message.extendee !== "") { + writer.uint32(18).string(message.extendee); + } + if (message.defaultValue !== "") { + writer.uint32(58).string(message.defaultValue); + } + if (message.oneofIndex !== 0) { + writer.uint32(72).int32(message.oneofIndex); + } + if (message.jsonName !== "") { + writer.uint32(82).string(message.jsonName); + } + if (message.options !== undefined) { + FieldOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.proto3Optional === true) { + writer.uint32(136).bool(message.proto3Optional); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 3: + message.number = reader.int32(); + break; + case 4: + message.label = reader.int32() as any; + break; + case 5: + message.type = reader.int32() as any; + break; + case 6: + message.typeName = reader.string(); + break; + case 2: + message.extendee = reader.string(); + break; + case 7: + message.defaultValue = reader.string(); + break; + case 9: + message.oneofIndex = reader.int32(); + break; + case 10: + message.jsonName = reader.string(); + break; + case 8: + message.options = FieldOptions.decode(reader, reader.uint32()); + break; + case 17: + message.proto3Optional = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) ? String(object.typeName) : "", + extendee: isSet(object.extendee) ? String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "", + oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? String(object.jsonName) : "", + options: isSet(object.options) ? FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) ? 
Boolean(object.proto3Optional) : false, + }; + }, + + toJSON(message: FieldDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label)); + message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type)); + message.typeName !== undefined && (obj.typeName = message.typeName); + message.extendee !== undefined && (obj.extendee = message.extendee); + message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue); + message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex)); + message.jsonName !== undefined && (obj.jsonName = message.jsonName); + message.options !== undefined && (obj.options = message.options ? FieldOptions.toJSON(message.options) : undefined); + message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional); + return obj; + }, + + fromPartial, I>>(object: I): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.label = object.label ?? 1; + message.type = object.type ?? 1; + message.typeName = object.typeName ?? ""; + message.extendee = object.extendee ?? ""; + message.defaultValue = object.defaultValue ?? ""; + message.oneofIndex = object.oneofIndex ?? 0; + message.jsonName = object.jsonName ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? FieldOptions.fromPartial(object.options) + : undefined; + message.proto3Optional = object.proto3Optional ?? 
false; + return message; + }, +}; + +function createBaseOneofDescriptorProto(): OneofDescriptorProto { + return { name: "", options: undefined }; +} + +export const OneofDescriptorProto = { + encode(message: OneofDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.options !== undefined) { + OneofOptions.encode(message.options, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.options = OneofOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + options: isSet(object.options) ? OneofOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: OneofDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.options !== undefined && (obj.options = message.options ? OneofOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto(); + message.name = object.name ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? 
OneofOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseEnumDescriptorProto(): EnumDescriptorProto { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] }; +} + +export const EnumDescriptorProto = { + encode(message: EnumDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.value) { + EnumValueDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + EnumOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.reservedRange) { + EnumDescriptorProto_EnumReservedRange.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(42).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.value.push(EnumValueDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = EnumOptions.decode(reader, reader.uint32()); + break; + case 4: + message.reservedRange.push(EnumDescriptorProto_EnumReservedRange.decode(reader, reader.uint32())); + break; + case 5: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + value: Array.isArray(object?.value) ? 
object.value.map((e: any) => EnumValueDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? EnumOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: EnumDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.value) { + obj.value = message.value.map((e) => e ? EnumValueDescriptorProto.toJSON(e) : undefined); + } else { + obj.value = []; + } + message.options !== undefined && (obj.options = message.options ? EnumOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => + e ? EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined + ); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto(); + message.name = object.name ?? ""; + message.value = object.value?.map((e) => EnumValueDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
EnumOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => EnumDescriptorProto_EnumReservedRange.fromPartial(e)) + || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseEnumDescriptorProto_EnumReservedRange(): EnumDescriptorProto_EnumReservedRange { + return { start: 0, end: 0 }; +} + +export const EnumDescriptorProto_EnumReservedRange = { + encode(message: EnumDescriptorProto_EnumReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto_EnumReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto_EnumReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + + toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +function createBaseEnumValueDescriptorProto(): EnumValueDescriptorProto { + return { name: "", number: 0, options: undefined }; +} + +export const EnumValueDescriptorProto = { + encode(message: EnumValueDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(16).int32(message.number); + } + if (message.options !== undefined) { + EnumValueOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.number = reader.int32(); + break; + case 3: + message.options = EnumValueOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + options: isSet(object.options) ? EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: EnumValueDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.options !== undefined + && (obj.options = message.options ? 
EnumValueOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? EnumValueOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseServiceDescriptorProto(): ServiceDescriptorProto { + return { name: "", method: [], options: undefined }; +} + +export const ServiceDescriptorProto = { + encode(message: ServiceDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.method) { + MethodDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + ServiceOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.method.push(MethodDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = ServiceOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + method: Array.isArray(object?.method) ? object.method.map((e: any) => MethodDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? 
ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: ServiceDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.method) { + obj.method = message.method.map((e) => e ? MethodDescriptorProto.toJSON(e) : undefined); + } else { + obj.method = []; + } + message.options !== undefined + && (obj.options = message.options ? ServiceOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto(); + message.name = object.name ?? ""; + message.method = object.method?.map((e) => MethodDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? ServiceOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseMethodDescriptorProto(): MethodDescriptorProto { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} + +export const MethodDescriptorProto = { + encode(message: MethodDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.inputType !== "") { + writer.uint32(18).string(message.inputType); + } + if (message.outputType !== "") { + writer.uint32(26).string(message.outputType); + } + if (message.options !== undefined) { + MethodOptions.encode(message.options, writer.uint32(34).fork()).ldelim(); + } + if (message.clientStreaming === true) { + writer.uint32(40).bool(message.clientStreaming); + } + if (message.serverStreaming === true) { + writer.uint32(48).bool(message.serverStreaming); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodDescriptorProto { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.inputType = reader.string(); + break; + case 3: + message.outputType = reader.string(); + break; + case 4: + message.options = MethodOptions.decode(reader, reader.uint32()); + break; + case 5: + message.clientStreaming = reader.bool(); + break; + case 6: + message.serverStreaming = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + inputType: isSet(object.inputType) ? String(object.inputType) : "", + outputType: isSet(object.outputType) ? String(object.outputType) : "", + options: isSet(object.options) ? MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false, + serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false, + }; + }, + + toJSON(message: MethodDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.inputType !== undefined && (obj.inputType = message.inputType); + message.outputType !== undefined && (obj.outputType = message.outputType); + message.options !== undefined + && (obj.options = message.options ? MethodOptions.toJSON(message.options) : undefined); + message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming); + message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming); + return obj; + }, + + fromPartial, I>>(object: I): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto(); + message.name = object.name ?? 
""; + message.inputType = object.inputType ?? ""; + message.outputType = object.outputType ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? MethodOptions.fromPartial(object.options) + : undefined; + message.clientStreaming = object.clientStreaming ?? false; + message.serverStreaming = object.serverStreaming ?? false; + return message; + }, +}; + +function createBaseFileOptions(): FileOptions { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + uninterpretedOption: [], + }; +} + +export const FileOptions = { + encode(message: FileOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.javaPackage !== "") { + writer.uint32(10).string(message.javaPackage); + } + if (message.javaOuterClassname !== "") { + writer.uint32(66).string(message.javaOuterClassname); + } + if (message.javaMultipleFiles === true) { + writer.uint32(80).bool(message.javaMultipleFiles); + } + if (message.javaGenerateEqualsAndHash === true) { + writer.uint32(160).bool(message.javaGenerateEqualsAndHash); + } + if (message.javaStringCheckUtf8 === true) { + writer.uint32(216).bool(message.javaStringCheckUtf8); + } + if (message.optimizeFor !== 1) { + writer.uint32(72).int32(message.optimizeFor); + } + if (message.goPackage !== "") { + writer.uint32(90).string(message.goPackage); + } + if (message.ccGenericServices === true) { + writer.uint32(128).bool(message.ccGenericServices); + } + if (message.javaGenericServices === true) { + writer.uint32(136).bool(message.javaGenericServices); + } + if 
(message.pyGenericServices === true) { + writer.uint32(144).bool(message.pyGenericServices); + } + if (message.phpGenericServices === true) { + writer.uint32(336).bool(message.phpGenericServices); + } + if (message.deprecated === true) { + writer.uint32(184).bool(message.deprecated); + } + if (message.ccEnableArenas === true) { + writer.uint32(248).bool(message.ccEnableArenas); + } + if (message.objcClassPrefix !== "") { + writer.uint32(290).string(message.objcClassPrefix); + } + if (message.csharpNamespace !== "") { + writer.uint32(298).string(message.csharpNamespace); + } + if (message.swiftPrefix !== "") { + writer.uint32(314).string(message.swiftPrefix); + } + if (message.phpClassPrefix !== "") { + writer.uint32(322).string(message.phpClassPrefix); + } + if (message.phpNamespace !== "") { + writer.uint32(330).string(message.phpNamespace); + } + if (message.phpMetadataNamespace !== "") { + writer.uint32(354).string(message.phpMetadataNamespace); + } + if (message.rubyPackage !== "") { + writer.uint32(362).string(message.rubyPackage); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.javaPackage = reader.string(); + break; + case 8: + message.javaOuterClassname = reader.string(); + break; + case 10: + message.javaMultipleFiles = reader.bool(); + break; + case 20: + message.javaGenerateEqualsAndHash = reader.bool(); + break; + case 27: + message.javaStringCheckUtf8 = reader.bool(); + break; + case 9: + message.optimizeFor = reader.int32() as any; + break; + case 11: + message.goPackage = reader.string(); + break; + case 16: + message.ccGenericServices = reader.bool(); + break; + case 17: + message.javaGenericServices = reader.bool(); + break; + case 18: + message.pyGenericServices = reader.bool(); + break; + case 42: + message.phpGenericServices = reader.bool(); + break; + case 23: + message.deprecated = reader.bool(); + break; + case 31: + message.ccEnableArenas = reader.bool(); + break; + case 36: + message.objcClassPrefix = reader.string(); + break; + case 37: + message.csharpNamespace = reader.string(); + break; + case 39: + message.swiftPrefix = reader.string(); + break; + case 40: + message.phpClassPrefix = reader.string(); + break; + case 41: + message.phpNamespace = reader.string(); + break; + case 44: + message.phpMetadataNamespace = reader.string(); + break; + case 45: + message.rubyPackage = reader.string(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileOptions { + return { + javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "", + javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) ? 
Boolean(object.javaMultipleFiles) : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false, + optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, + goPackage: isSet(object.goPackage) ? String(object.goPackage) : "", + ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false, + javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false, + pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false, + phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false, + objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "", + csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "", + swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "", + phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "", + phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "", + rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "", + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FileOptions): unknown { + const obj: any = {}; + message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage); + message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname); + message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles); + message.javaGenerateEqualsAndHash !== undefined + && (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash); + message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8); + message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor)); + message.goPackage !== undefined && (obj.goPackage = message.goPackage); + message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices); + message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices); + message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices); + message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas); + message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix); + message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace); + message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix); + message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix); + message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace); + message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace); + message.rubyPackage !== undefined 
&& (obj.rubyPackage = message.rubyPackage); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileOptions { + const message = createBaseFileOptions(); + message.javaPackage = object.javaPackage ?? ""; + message.javaOuterClassname = object.javaOuterClassname ?? ""; + message.javaMultipleFiles = object.javaMultipleFiles ?? false; + message.javaGenerateEqualsAndHash = object.javaGenerateEqualsAndHash ?? false; + message.javaStringCheckUtf8 = object.javaStringCheckUtf8 ?? false; + message.optimizeFor = object.optimizeFor ?? 1; + message.goPackage = object.goPackage ?? ""; + message.ccGenericServices = object.ccGenericServices ?? false; + message.javaGenericServices = object.javaGenericServices ?? false; + message.pyGenericServices = object.pyGenericServices ?? false; + message.phpGenericServices = object.phpGenericServices ?? false; + message.deprecated = object.deprecated ?? false; + message.ccEnableArenas = object.ccEnableArenas ?? false; + message.objcClassPrefix = object.objcClassPrefix ?? ""; + message.csharpNamespace = object.csharpNamespace ?? ""; + message.swiftPrefix = object.swiftPrefix ?? ""; + message.phpClassPrefix = object.phpClassPrefix ?? ""; + message.phpNamespace = object.phpNamespace ?? ""; + message.phpMetadataNamespace = object.phpMetadataNamespace ?? ""; + message.rubyPackage = object.rubyPackage ?? 
""; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMessageOptions(): MessageOptions { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + uninterpretedOption: [], + }; +} + +export const MessageOptions = { + encode(message: MessageOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.messageSetWireFormat === true) { + writer.uint32(8).bool(message.messageSetWireFormat); + } + if (message.noStandardDescriptorAccessor === true) { + writer.uint32(16).bool(message.noStandardDescriptorAccessor); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.mapEntry === true) { + writer.uint32(56).bool(message.mapEntry); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MessageOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMessageOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.messageSetWireFormat = reader.bool(); + break; + case 2: + message.noStandardDescriptorAccessor = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 7: + message.mapEntry = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MessageOptions { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) ? 
Boolean(object.messageSetWireFormat) : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MessageOptions): unknown { + const obj: any = {}; + message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat); + message.noStandardDescriptorAccessor !== undefined + && (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MessageOptions { + const message = createBaseMessageOptions(); + message.messageSetWireFormat = object.messageSetWireFormat ?? false; + message.noStandardDescriptorAccessor = object.noStandardDescriptorAccessor ?? false; + message.deprecated = object.deprecated ?? false; + message.mapEntry = object.mapEntry ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldOptions(): FieldOptions { + return { ctype: 0, packed: false, jstype: 0, lazy: false, deprecated: false, weak: false, uninterpretedOption: [] }; +} + +export const FieldOptions = { + encode(message: FieldOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ctype !== 0) { + writer.uint32(8).int32(message.ctype); + } + if (message.packed === true) { + writer.uint32(16).bool(message.packed); + } + if (message.jstype !== 0) { + writer.uint32(48).int32(message.jstype); + } + if (message.lazy === true) { + writer.uint32(40).bool(message.lazy); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.weak === true) { + writer.uint32(80).bool(message.weak); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.ctype = reader.int32() as any; + break; + case 2: + message.packed = reader.bool(); + break; + case 6: + message.jstype = reader.int32() as any; + break; + case 5: + message.lazy = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 10: + message.weak = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldOptions { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? Boolean(object.lazy) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? Boolean(object.weak) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FieldOptions): unknown { + const obj: any = {}; + message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype)); + message.packed !== undefined && (obj.packed = message.packed); + message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype)); + message.lazy !== undefined && (obj.lazy = message.lazy); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.weak !== undefined && (obj.weak = message.weak); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FieldOptions { + const message = createBaseFieldOptions(); + message.ctype = object.ctype ?? 0; + message.packed = object.packed ?? false; + message.jstype = object.jstype ?? 0; + message.lazy = object.lazy ?? false; + message.deprecated = object.deprecated ?? false; + message.weak = object.weak ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseOneofOptions(): OneofOptions { + return { uninterpretedOption: [] }; +} + +export const OneofOptions = { + encode(message: OneofOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: OneofOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): OneofOptions { + const message = createBaseOneofOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumOptions(): EnumOptions { + return { allowAlias: false, deprecated: false, uninterpretedOption: [] }; +} + +export const EnumOptions = { + encode(message: EnumOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.allowAlias === true) { + writer.uint32(16).bool(message.allowAlias); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.allowAlias = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumOptions { + return { + allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumOptions): unknown { + const obj: any = {}; + message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumOptions { + const message = createBaseEnumOptions(); + message.allowAlias = object.allowAlias ?? false; + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumValueOptions(): EnumValueOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const EnumValueOptions = { + encode(message: EnumValueOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(8).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumValueOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumValueOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumValueOptions { + const message = createBaseEnumValueOptions(); + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseServiceOptions(): ServiceOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const ServiceOptions = { + encode(message: ServiceOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceOptions { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ServiceOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ServiceOptions { + const message = createBaseServiceOptions(); + message.deprecated = object.deprecated ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMethodOptions(): MethodOptions { + return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] }; +} + +export const MethodOptions = { + encode(message: MethodOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + if (message.idempotencyLevel !== 0) { + writer.uint32(272).int32(message.idempotencyLevel); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 34: + message.idempotencyLevel = reader.int32() as any; + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : 0, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MethodOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.idempotencyLevel !== undefined + && (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel)); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MethodOptions { + const message = createBaseMethodOptions(); + message.deprecated = object.deprecated ?? false; + message.idempotencyLevel = object.idempotencyLevel ?? 0; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseUninterpretedOption(): UninterpretedOption { + return { + name: [], + identifierValue: "", + positiveIntValue: 0, + negativeIntValue: 0, + doubleValue: 0, + stringValue: new Uint8Array(), + aggregateValue: "", + }; +} + +export const UninterpretedOption = { + encode(message: UninterpretedOption, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.name) { + UninterpretedOption_NamePart.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.identifierValue !== "") { + writer.uint32(26).string(message.identifierValue); + } + if (message.positiveIntValue !== 0) { + writer.uint32(32).uint64(message.positiveIntValue); + } + if (message.negativeIntValue !== 0) { + writer.uint32(40).int64(message.negativeIntValue); + } + if (message.doubleValue !== 0) { + writer.uint32(49).double(message.doubleValue); + } + if (message.stringValue.length !== 0) { + writer.uint32(58).bytes(message.stringValue); + } + if (message.aggregateValue !== "") { + writer.uint32(66).string(message.aggregateValue); + } + 
return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.name.push(UninterpretedOption_NamePart.decode(reader, reader.uint32())); + break; + case 3: + message.identifierValue = reader.string(); + break; + case 4: + message.positiveIntValue = longToNumber(reader.uint64() as Long); + break; + case 5: + message.negativeIntValue = longToNumber(reader.int64() as Long); + break; + case 6: + message.doubleValue = reader.double(); + break; + case 7: + message.stringValue = reader.bytes(); + break; + case 8: + message.aggregateValue = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption { + return { + name: Array.isArray(object?.name) ? object.name.map((e: any) => UninterpretedOption_NamePart.fromJSON(e)) : [], + identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "", + positiveIntValue: isSet(object.positiveIntValue) ? Number(object.positiveIntValue) : 0, + negativeIntValue: isSet(object.negativeIntValue) ? Number(object.negativeIntValue) : 0, + doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) ? bytesFromBase64(object.stringValue) : new Uint8Array(), + aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "", + }; + }, + + toJSON(message: UninterpretedOption): unknown { + const obj: any = {}; + if (message.name) { + obj.name = message.name.map((e) => e ? 
UninterpretedOption_NamePart.toJSON(e) : undefined); + } else { + obj.name = []; + } + message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue); + message.positiveIntValue !== undefined && (obj.positiveIntValue = Math.round(message.positiveIntValue)); + message.negativeIntValue !== undefined && (obj.negativeIntValue = Math.round(message.negativeIntValue)); + message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue); + message.stringValue !== undefined + && (obj.stringValue = base64FromBytes( + message.stringValue !== undefined ? message.stringValue : new Uint8Array(), + )); + message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption { + const message = createBaseUninterpretedOption(); + message.name = object.name?.map((e) => UninterpretedOption_NamePart.fromPartial(e)) || []; + message.identifierValue = object.identifierValue ?? ""; + message.positiveIntValue = object.positiveIntValue ?? 0; + message.negativeIntValue = object.negativeIntValue ?? 0; + message.doubleValue = object.doubleValue ?? 0; + message.stringValue = object.stringValue ?? new Uint8Array(); + message.aggregateValue = object.aggregateValue ?? ""; + return message; + }, +}; + +function createBaseUninterpretedOption_NamePart(): UninterpretedOption_NamePart { + return { namePart: "", isExtension: false }; +} + +export const UninterpretedOption_NamePart = { + encode(message: UninterpretedOption_NamePart, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.namePart !== "") { + writer.uint32(10).string(message.namePart); + } + if (message.isExtension === true) { + writer.uint32(16).bool(message.isExtension); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption_NamePart { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseUninterpretedOption_NamePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.namePart = reader.string(); + break; + case 2: + message.isExtension = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption_NamePart { + return { + namePart: isSet(object.namePart) ? String(object.namePart) : "", + isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false, + }; + }, + + toJSON(message: UninterpretedOption_NamePart): unknown { + const obj: any = {}; + message.namePart !== undefined && (obj.namePart = message.namePart); + message.isExtension !== undefined && (obj.isExtension = message.isExtension); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart(); + message.namePart = object.namePart ?? ""; + message.isExtension = object.isExtension ?? false; + return message; + }, +}; + +function createBaseSourceCodeInfo(): SourceCodeInfo { + return { location: [] }; +} + +export const SourceCodeInfo = { + encode(message: SourceCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.location) { + SourceCodeInfo_Location.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.location.push(SourceCodeInfo_Location.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo { + return { + location: Array.isArray(object?.location) + ? object.location.map((e: any) => SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo): unknown { + const obj: any = {}; + if (message.location) { + obj.location = message.location.map((e) => e ? SourceCodeInfo_Location.toJSON(e) : undefined); + } else { + obj.location = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo { + const message = createBaseSourceCodeInfo(); + message.location = object.location?.map((e) => SourceCodeInfo_Location.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSourceCodeInfo_Location(): SourceCodeInfo_Location { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} + +export const SourceCodeInfo_Location = { + encode(message: SourceCodeInfo_Location, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(18).fork(); + for (const v of message.span) { + writer.int32(v); + } + writer.ldelim(); + if (message.leadingComments !== "") { + writer.uint32(26).string(message.leadingComments); + } + if (message.trailingComments !== "") { + writer.uint32(34).string(message.trailingComments); + } + for (const v of message.leadingDetachedComments) { + writer.uint32(50).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo_Location { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo_Location(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.span.push(reader.int32()); + } + } else { + message.span.push(reader.int32()); + } + break; + case 3: + message.leadingComments = reader.string(); + break; + case 4: + message.trailingComments = reader.string(); + break; + case 6: + message.leadingDetachedComments.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo_Location { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + span: Array.isArray(object?.span) ? object.span.map((e: any) => Number(e)) : [], + leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "", + trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "", + leadingDetachedComments: Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e: any) => String(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo_Location): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + if (message.span) { + obj.span = message.span.map((e) => Math.round(e)); + } else { + obj.span = []; + } + message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments); + message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments); + if (message.leadingDetachedComments) { + obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e); + } else { + obj.leadingDetachedComments = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location(); + message.path = object.path?.map((e) => e) || []; + message.span = object.span?.map((e) => e) || []; + message.leadingComments = object.leadingComments ?? ""; + message.trailingComments = object.trailingComments ?? ""; + message.leadingDetachedComments = object.leadingDetachedComments?.map((e) => e) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo(): GeneratedCodeInfo { + return { annotation: [] }; +} + +export const GeneratedCodeInfo = { + encode(message: GeneratedCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.annotation) { + GeneratedCodeInfo_Annotation.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.annotation.push(GeneratedCodeInfo_Annotation.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo { + return { + annotation: Array.isArray(object?.annotation) + ? object.annotation.map((e: any) => GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GeneratedCodeInfo): unknown { + const obj: any = {}; + if (message.annotation) { + obj.annotation = message.annotation.map((e) => e ? GeneratedCodeInfo_Annotation.toJSON(e) : undefined); + } else { + obj.annotation = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo(); + message.annotation = object.annotation?.map((e) => GeneratedCodeInfo_Annotation.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo_Annotation(): GeneratedCodeInfo_Annotation { + return { path: [], sourceFile: "", begin: 0, end: 0 }; +} + +export const GeneratedCodeInfo_Annotation = { + encode(message: GeneratedCodeInfo_Annotation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + if (message.sourceFile !== "") { + writer.uint32(18).string(message.sourceFile); + } + if (message.begin !== 0) { + writer.uint32(24).int32(message.begin); + } + if (message.end !== 0) { + writer.uint32(32).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo_Annotation { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo_Annotation(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + message.sourceFile = reader.string(); + break; + case 3: + message.begin = reader.int32(); + break; + case 4: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo_Annotation { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "", + begin: isSet(object.begin) ? Number(object.begin) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + }; + }, + + toJSON(message: GeneratedCodeInfo_Annotation): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile); + message.begin !== undefined && (obj.begin = Math.round(message.begin)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation(); + message.path = object.path?.map((e) => e) || []; + message.sourceFile = object.sourceFile ?? ""; + message.begin = object.begin ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.tx.v1beta1/index.ts b/ts-client/cosmos.tx.v1beta1/index.ts new file mode 100755 index 000000000..c9dfa159e --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/index.ts @@ -0,0 +1,6 @@ +import Module from './module'; +import { txClient, queryClient, registry } from './module'; +import { msgTypes } from './registry'; + +export * from "./types"; +export { Module, msgTypes, txClient, queryClient, registry }; diff --git a/ts-client/cosmos.tx.v1beta1/module.ts b/ts-client/cosmos.tx.v1beta1/module.ts new file mode 100755 index 000000000..df728d396 --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/module.ts @@ -0,0 +1,122 @@ +// Generated by Ignite ignite.com/cli + +import { StdFee } from "@cosmjs/launchpad"; +import { SigningStargateClient, DeliverTxResponse } from "@cosmjs/stargate"; +import { EncodeObject, GeneratedType, OfflineSigner, Registry } from "@cosmjs/proto-signing"; +import { msgTypes } from './registry'; +import { IgniteClient } from "../client" +import { MissingWalletError } from "../helpers" +import { Api } from "./rest"; + +import { Tx as typeTx} from "./types" +import { TxRaw as typeTxRaw} from "./types" +import { SignDoc as typeSignDoc} from "./types" +import { SignDocDirectAux as typeSignDocDirectAux} from "./types" +import { TxBody as typeTxBody} from "./types" +import { AuthInfo as typeAuthInfo} from "./types" +import { SignerInfo as typeSignerInfo} from "./types" +import { ModeInfo as typeModeInfo} from "./types" +import { ModeInfo_Single as typeModeInfo_Single} from "./types" +import { 
ModeInfo_Multi as typeModeInfo_Multi} from "./types" +import { Fee as typeFee} from "./types" +import { Tip as typeTip} from "./types" +import { AuxSignerData as typeAuxSignerData} from "./types" + +export { }; + + + +export const registry = new Registry(msgTypes); + +type Field = { + name: string; + type: unknown; +} +function getStructure(template) { + const structure: {fields: Field[]} = { fields: [] } + for (let [key, value] of Object.entries(template)) { + let field = { name: key, type: typeof value } + structure.fields.push(field) + } + return structure +} +const defaultFee = { + amount: [], + gas: "200000", +}; + +interface TxClientOptions { + addr: string + prefix: string + signer?: OfflineSigner +} + +export const txClient = ({ signer, prefix, addr }: TxClientOptions = { addr: "http://localhost:26657", prefix: "cosmos" }) => { + + return { + + + } +}; + +interface QueryClientOptions { + addr: string +} + +export const queryClient = ({ addr: addr }: QueryClientOptions = { addr: "http://localhost:1317" }) => { + return new Api({ baseURL: addr }); +}; + +class SDKModule { + public query: ReturnType; + public tx: ReturnType; + public structure: Record; + public registry: Array<[string, GeneratedType]> = []; + + constructor(client: IgniteClient) { + + this.query = queryClient({ addr: client.env.apiURL }); + this.updateTX(client); + this.structure = { + Tx: getStructure(typeTx.fromPartial({})), + TxRaw: getStructure(typeTxRaw.fromPartial({})), + SignDoc: getStructure(typeSignDoc.fromPartial({})), + SignDocDirectAux: getStructure(typeSignDocDirectAux.fromPartial({})), + TxBody: getStructure(typeTxBody.fromPartial({})), + AuthInfo: getStructure(typeAuthInfo.fromPartial({})), + SignerInfo: getStructure(typeSignerInfo.fromPartial({})), + ModeInfo: getStructure(typeModeInfo.fromPartial({})), + ModeInfo_Single: getStructure(typeModeInfo_Single.fromPartial({})), + ModeInfo_Multi: getStructure(typeModeInfo_Multi.fromPartial({})), + Fee: 
getStructure(typeFee.fromPartial({})), + Tip: getStructure(typeTip.fromPartial({})), + AuxSignerData: getStructure(typeAuxSignerData.fromPartial({})), + + }; + client.on('signer-changed',(signer) => { + this.updateTX(client); + }) + } + updateTX(client: IgniteClient) { + const methods = txClient({ + signer: client.signer, + addr: client.env.rpcURL, + prefix: client.env.prefix ?? "cosmos", + }) + + this.tx = methods; + for (let m in methods) { + this.tx[m] = methods[m].bind(this.tx); + } + } +}; + +const Module = (test: IgniteClient) => { + return { + module: { + CosmosTxV1Beta1: new SDKModule(test) + }, + registry: msgTypes + } +} +export default Module; \ No newline at end of file diff --git a/ts-client/cosmos.tx.v1beta1/registry.ts b/ts-client/cosmos.tx.v1beta1/registry.ts new file mode 100755 index 000000000..26157e4ff --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/registry.ts @@ -0,0 +1,7 @@ +import { GeneratedType } from "@cosmjs/proto-signing"; + +const msgTypes: Array<[string, GeneratedType]> = [ + +]; + +export { msgTypes } \ No newline at end of file diff --git a/ts-client/cosmos.tx.v1beta1/rest.ts b/ts-client/cosmos.tx.v1beta1/rest.ts new file mode 100644 index 000000000..ec7a6207d --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/rest.ts @@ -0,0 +1,1533 @@ +/* eslint-disable */ +/* tslint:disable */ +/* + * --------------------------------------------------------------- + * ## THIS FILE WAS GENERATED VIA SWAGGER-TYPESCRIPT-API ## + * ## ## + * ## AUTHOR: acacode ## + * ## SOURCE: https://github.com/acacode/swagger-typescript-api ## + * --------------------------------------------------------------- + */ + +/** +* Event allows application developers to attach additional information to +ResponseBeginBlock, ResponseEndBlock, ResponseCheckTx and ResponseDeliverTx. +Later, transactions may be queried using these events. 
+*/ +export interface AbciEvent { + type?: string; + attributes?: AbciEventAttribute[]; +} + +/** + * EventAttribute is a single key-value pair, associated with an event. + */ +export interface AbciEventAttribute { + key?: string; + value?: string; + + /** nondeterministic */ + index?: boolean; +} + +/** + * Result is the union of ResponseFormat and ResponseCheckTx. + */ +export interface Abciv1Beta1Result { + /** + * Data is any data returned from message or handler execution. It MUST be + * length prefixed in order to separate data from multiple message executions. + * Deprecated. This field is still populated, but prefer msg_response instead + * because it also contains the Msg response typeURL. + * @format byte + */ + data?: string; + + /** Log contains the log information from message or handler execution. */ + log?: string; + + /** + * Events contains a slice of Event objects that were emitted during message + * or handler execution. + */ + events?: AbciEvent[]; + + /** + * msg_responses contains the Msg handler responses type packed in Anys. + * + * Since: cosmos-sdk 0.46 + */ + msg_responses?: ProtobufAny[]; +} + +export interface CryptoPublicKey { + /** @format byte */ + ed25519?: string; + + /** @format byte */ + secp256k1?: string; +} + +/** +* `Any` contains an arbitrary serialized protocol buffer message along with a +URL that describes the type of the serialized message. + +Protobuf library provides support to pack/unpack Any values in the form +of utility functions or additional generated methods of the Any type. + +Example 1: Pack and unpack a message in C++. + + Foo foo = ...; + Any any; + any.PackFrom(foo); + ... + if (any.UnpackTo(&foo)) { + ... + } + +Example 2: Pack and unpack a message in Java. + + Foo foo = ...; + Any any = Any.pack(foo); + ... + if (any.is(Foo.class)) { + foo = any.unpack(Foo.class); + } + + Example 3: Pack and unpack a message in Python. + + foo = Foo(...) + any = Any() + any.Pack(foo) + ... 
+ if any.Is(Foo.DESCRIPTOR): + any.Unpack(foo) + ... + + Example 4: Pack and unpack a message in Go + + foo := &pb.Foo{...} + any, err := anypb.New(foo) + if err != nil { + ... + } + ... + foo := &pb.Foo{} + if err := any.UnmarshalTo(foo); err != nil { + ... + } + +The pack methods provided by protobuf library will by default use +'type.googleapis.com/full.type.name' as the type URL and the unpack +methods only use the fully qualified type name after the last '/' +in the type URL, for example "foo.bar.com/x/y.z" will yield type +name "y.z". + + +JSON +==== +The JSON representation of an `Any` value uses the regular +representation of the deserialized, embedded message, with an +additional field `@type` which contains the type URL. Example: + + package google.profile; + message Person { + string first_name = 1; + string last_name = 2; + } + + { + "@type": "type.googleapis.com/google.profile.Person", + "firstName": , + "lastName": + } + +If the embedded message type is well-known and has a custom JSON +representation, that representation will be embedded adding a field +`value` which holds the custom JSON in addition to the `@type` +field. Example (for message [google.protobuf.Duration][]): + + { + "@type": "type.googleapis.com/google.protobuf.Duration", + "value": "1.212s" + } +*/ +export interface ProtobufAny { + /** + * A URL/resource name that uniquely identifies the type of the serialized + * protocol buffer message. This string must contain at least + * one "/" character. The last segment of the URL's path must represent + * the fully qualified name of the type (as in + * `path/google.protobuf.Duration`). The name should be in a canonical form + * (e.g., leading "." is not accepted). + * + * In practice, teams usually precompile into the binary all types that they + * expect it to use in the context of Any. 
However, for URLs which use the + * scheme `http`, `https`, or no scheme, one can optionally set up a type + * server that maps type URLs to message definitions as follows: + * * If no scheme is provided, `https` is assumed. + * * An HTTP GET on the URL must yield a [google.protobuf.Type][] + * value in binary format, or produce an error. + * * Applications are allowed to cache lookup results based on the + * URL, or have them precompiled into a binary to avoid any + * lookup. Therefore, binary compatibility needs to be preserved + * on changes to types. (Use versioned type names to manage + * breaking changes.) + * Note: this functionality is not currently available in the official + * protobuf release, and it is not used for type URLs beginning with + * type.googleapis.com. + * Schemes other than `http`, `https` (or the empty scheme) might be + * used with implementation specific semantics. + */ + "@type"?: string; +} + +export interface RpcStatus { + /** @format int32 */ + code?: number; + message?: string; + details?: ProtobufAny[]; +} + +export interface TenderminttypesData { + /** + * Txs that will be applied by state @ block.Height+1. + * NOTE: not all txs here are valid. We're just agreeing on the order first. + * This means that block.AppHash does not include these txs. + */ + txs?: string[]; +} + +export interface TenderminttypesValidator { + /** @format byte */ + address?: string; + pub_key?: CryptoPublicKey; + + /** @format int64 */ + voting_power?: string; + + /** @format int64 */ + proposer_priority?: string; +} + +export interface TypesBlock { + /** Header defines the structure of a block header. */ + header?: TypesHeader; + data?: TenderminttypesData; + evidence?: TypesEvidenceList; + + /** Commit contains the evidence that a block was committed by a set of validators. 
*/ + last_commit?: TypesCommit; +} + +export interface TypesBlockID { + /** @format byte */ + hash?: string; + part_set_header?: TypesPartSetHeader; +} + +export enum TypesBlockIDFlag { + BLOCK_ID_FLAG_UNKNOWN = "BLOCK_ID_FLAG_UNKNOWN", + BLOCK_ID_FLAG_ABSENT = "BLOCK_ID_FLAG_ABSENT", + BLOCK_ID_FLAG_COMMIT = "BLOCK_ID_FLAG_COMMIT", + BLOCK_ID_FLAG_NIL = "BLOCK_ID_FLAG_NIL", +} + +/** + * Commit contains the evidence that a block was committed by a set of validators. + */ +export interface TypesCommit { + /** @format int64 */ + height?: string; + + /** @format int32 */ + round?: number; + block_id?: TypesBlockID; + signatures?: TypesCommitSig[]; +} + +/** + * CommitSig is a part of the Vote included in a Commit. + */ +export interface TypesCommitSig { + block_id_flag?: TypesBlockIDFlag; + + /** @format byte */ + validator_address?: string; + + /** @format date-time */ + timestamp?: string; + + /** @format byte */ + signature?: string; +} + +/** + * DuplicateVoteEvidence contains evidence of a validator signed two conflicting votes. + */ +export interface TypesDuplicateVoteEvidence { + /** + * Vote represents a prevote, precommit, or commit vote from validators for + * consensus. + */ + vote_a?: TypesVote; + + /** + * Vote represents a prevote, precommit, or commit vote from validators for + * consensus. + */ + vote_b?: TypesVote; + + /** @format int64 */ + total_voting_power?: string; + + /** @format int64 */ + validator_power?: string; + + /** @format date-time */ + timestamp?: string; +} + +export interface TypesEvidence { + /** DuplicateVoteEvidence contains evidence of a validator signed two conflicting votes. */ + duplicate_vote_evidence?: TypesDuplicateVoteEvidence; + + /** LightClientAttackEvidence contains evidence of a set of validators attempting to mislead a light client. 
*/ + light_client_attack_evidence?: TypesLightClientAttackEvidence; +} + +export interface TypesEvidenceList { + evidence?: TypesEvidence[]; +} + +/** + * Header defines the structure of a block header. + */ +export interface TypesHeader { + /** + * basic block info + * Consensus captures the consensus rules for processing a block in the blockchain, + * including all blockchain data structures and the rules of the application's + * state transition machine. + */ + version?: VersionConsensus; + chain_id?: string; + + /** @format int64 */ + height?: string; + + /** @format date-time */ + time?: string; + + /** prev block info */ + last_block_id?: TypesBlockID; + + /** + * hashes of block data + * commit from validators from the last block + * @format byte + */ + last_commit_hash?: string; + + /** + * transactions + * @format byte + */ + data_hash?: string; + + /** + * hashes from the app output from the prev block + * validators for the current block + * @format byte + */ + validators_hash?: string; + + /** + * validators for the next block + * @format byte + */ + next_validators_hash?: string; + + /** + * consensus params for current block + * @format byte + */ + consensus_hash?: string; + + /** + * state after txs from the previous block + * @format byte + */ + app_hash?: string; + + /** + * root hash of all results from the txs from the previous block + * @format byte + */ + last_results_hash?: string; + + /** + * consensus info + * evidence included in the block + * @format byte + */ + evidence_hash?: string; + + /** + * original proposer of the block + * @format byte + */ + proposer_address?: string; +} + +export interface TypesLightBlock { + signed_header?: TypesSignedHeader; + validator_set?: TypesValidatorSet; +} + +/** + * LightClientAttackEvidence contains evidence of a set of validators attempting to mislead a light client. 
+ */ +export interface TypesLightClientAttackEvidence { + conflicting_block?: TypesLightBlock; + + /** @format int64 */ + common_height?: string; + byzantine_validators?: TenderminttypesValidator[]; + + /** @format int64 */ + total_voting_power?: string; + + /** @format date-time */ + timestamp?: string; +} + +export interface TypesPartSetHeader { + /** @format int64 */ + total?: number; + + /** @format byte */ + hash?: string; +} + +export interface TypesSignedHeader { + /** Header defines the structure of a block header. */ + header?: TypesHeader; + + /** Commit contains the evidence that a block was committed by a set of validators. */ + commit?: TypesCommit; +} + +/** +* SignedMsgType is a type of signed message in the consensus. + + - SIGNED_MSG_TYPE_PREVOTE: Votes + - SIGNED_MSG_TYPE_PROPOSAL: Proposals +*/ +export enum TypesSignedMsgType { + SIGNED_MSG_TYPE_UNKNOWN = "SIGNED_MSG_TYPE_UNKNOWN", + SIGNED_MSG_TYPE_PREVOTE = "SIGNED_MSG_TYPE_PREVOTE", + SIGNED_MSG_TYPE_PRECOMMIT = "SIGNED_MSG_TYPE_PRECOMMIT", + SIGNED_MSG_TYPE_PROPOSAL = "SIGNED_MSG_TYPE_PROPOSAL", +} + +export interface TypesValidatorSet { + validators?: TenderminttypesValidator[]; + proposer?: TenderminttypesValidator; + + /** @format int64 */ + total_voting_power?: string; +} + +/** +* Vote represents a prevote, precommit, or commit vote from validators for +consensus. +*/ +export interface TypesVote { + /** + * SignedMsgType is a type of signed message in the consensus. + * + * - SIGNED_MSG_TYPE_PREVOTE: Votes + * - SIGNED_MSG_TYPE_PROPOSAL: Proposals + */ + type?: TypesSignedMsgType; + + /** @format int64 */ + height?: string; + + /** @format int32 */ + round?: number; + + /** zero if vote is nil. 
*/ + block_id?: TypesBlockID; + + /** @format date-time */ + timestamp?: string; + + /** @format byte */ + validator_address?: string; + + /** @format int32 */ + validator_index?: number; + + /** @format byte */ + signature?: string; +} + +/** + * ABCIMessageLog defines a structure containing an indexed tx ABCI message log. + */ +export interface V1Beta1ABCIMessageLog { + /** @format int64 */ + msg_index?: number; + log?: string; + + /** + * Events contains a slice of Event objects that were emitted during some + * execution. + */ + events?: V1Beta1StringEvent[]; +} + +/** +* Attribute defines an attribute wrapper where the key and value are +strings instead of raw bytes. +*/ +export interface V1Beta1Attribute { + key?: string; + value?: string; +} + +/** +* AuthInfo describes the fee and signer modes that are used to sign a +transaction. +*/ +export interface V1Beta1AuthInfo { + /** + * signer_infos defines the signing modes for the required signers. The number + * and order of elements must match the required signers from TxBody's + * messages. The first element is the primary signer and the one which pays + * the fee. + */ + signer_infos?: V1Beta1SignerInfo[]; + + /** + * Fee is the fee and gas limit for the transaction. The first signer is the + * primary signer and the one which pays the fee. The fee can be calculated + * based on the cost of evaluating the body and doing signature verification + * of the signers. This can be estimated via simulation. + */ + fee?: V1Beta1Fee; + + /** + * Tip is the optional tip used for transactions fees paid in another denom. + * + * This field is ignored if the chain didn't enable tips, i.e. didn't add the + * `TipDecorator` in its posthandler. + * Since: cosmos-sdk 0.46 + */ + tip?: V1Beta1Tip; +} + +/** +* BroadcastMode specifies the broadcast mode for the TxService.Broadcast RPC method. 
+ + - BROADCAST_MODE_UNSPECIFIED: zero-value for mode ordering + - BROADCAST_MODE_BLOCK: DEPRECATED: use BROADCAST_MODE_SYNC instead, +BROADCAST_MODE_BLOCK is not supported by the SDK from v0.47.x onwards. + - BROADCAST_MODE_SYNC: BROADCAST_MODE_SYNC defines a tx broadcasting mode where the client waits for +a CheckTx execution response only. + - BROADCAST_MODE_ASYNC: BROADCAST_MODE_ASYNC defines a tx broadcasting mode where the client returns +immediately. +*/ +export enum V1Beta1BroadcastMode { + BROADCAST_MODE_UNSPECIFIED = "BROADCAST_MODE_UNSPECIFIED", + BROADCAST_MODE_BLOCK = "BROADCAST_MODE_BLOCK", + BROADCAST_MODE_SYNC = "BROADCAST_MODE_SYNC", + BROADCAST_MODE_ASYNC = "BROADCAST_MODE_ASYNC", +} + +/** +* BroadcastTxRequest is the request type for the Service.BroadcastTxRequest +RPC method. +*/ +export interface V1Beta1BroadcastTxRequest { + /** + * tx_bytes is the raw transaction. + * @format byte + */ + tx_bytes?: string; + + /** + * BroadcastMode specifies the broadcast mode for the TxService.Broadcast RPC method. + * + * - BROADCAST_MODE_UNSPECIFIED: zero-value for mode ordering + * - BROADCAST_MODE_BLOCK: DEPRECATED: use BROADCAST_MODE_SYNC instead, + * BROADCAST_MODE_BLOCK is not supported by the SDK from v0.47.x onwards. + * - BROADCAST_MODE_SYNC: BROADCAST_MODE_SYNC defines a tx broadcasting mode where the client waits for + * a CheckTx execution response only. + * - BROADCAST_MODE_ASYNC: BROADCAST_MODE_ASYNC defines a tx broadcasting mode where the client returns + * immediately. + */ + mode?: V1Beta1BroadcastMode; +} + +/** +* BroadcastTxResponse is the response type for the +Service.BroadcastTx method. +*/ +export interface V1Beta1BroadcastTxResponse { + /** tx_response is the queried TxResponses. */ + tx_response?: V1Beta1TxResponse; +} + +/** +* Coin defines a token with a denomination and an amount. + +NOTE: The amount field is an Int which implements the custom method +signatures required by gogoproto. 
+*/ +export interface V1Beta1Coin { + denom?: string; + amount?: string; +} + +/** +* CompactBitArray is an implementation of a space efficient bit array. +This is used to ensure that the encoded data takes up a minimal amount of +space after proto encoding. +This is not thread safe, and is not intended for concurrent usage. +*/ +export interface V1Beta1CompactBitArray { + /** @format int64 */ + extra_bits_stored?: number; + + /** @format byte */ + elems?: string; +} + +/** +* Fee includes the amount of coins paid in fees and the maximum +gas to be used by the transaction. The ratio yields an effective "gasprice", +which must be above some miminum to be accepted into the mempool. +*/ +export interface V1Beta1Fee { + /** amount is the amount of coins to be paid as a fee */ + amount?: V1Beta1Coin[]; + + /** + * gas_limit is the maximum gas that can be used in transaction processing + * before an out of gas error occurs + * @format uint64 + */ + gas_limit?: string; + + /** + * if unset, the first signer is responsible for paying the fees. If set, the specified account must pay the fees. + * the payer must be a tx signer (and thus have signed this field in AuthInfo). + * setting this field does *not* change the ordering of required signers for the transaction. + */ + payer?: string; + + /** + * if set, the fee payer (either the first signer or the value of the payer field) requests that a fee grant be used + * to pay fees instead of the fee payer's own balance. If an appropriate fee grant does not exist or the chain does + * not support fee grants, this will fail + */ + granter?: string; +} + +/** + * GasInfo defines tx execution gas context. + */ +export interface V1Beta1GasInfo { + /** + * GasWanted is the maximum units of work we allow this tx to perform. + * @format uint64 + */ + gas_wanted?: string; + + /** + * GasUsed is the amount of gas actually consumed. 
+ * @format uint64 + */ + gas_used?: string; +} + +/** +* GetBlockWithTxsResponse is the response type for the Service.GetBlockWithTxs method. + +Since: cosmos-sdk 0.45.2 +*/ +export interface V1Beta1GetBlockWithTxsResponse { + /** txs are the transactions in the block. */ + txs?: V1Beta1Tx[]; + block_id?: TypesBlockID; + block?: TypesBlock; + + /** pagination defines a pagination for the response. */ + pagination?: V1Beta1PageResponse; +} + +/** + * GetTxResponse is the response type for the Service.GetTx method. + */ +export interface V1Beta1GetTxResponse { + /** tx is the queried transaction. */ + tx?: V1Beta1Tx; + + /** tx_response is the queried TxResponses. */ + tx_response?: V1Beta1TxResponse; +} + +/** +* GetTxsEventResponse is the response type for the Service.TxsByEvents +RPC method. +*/ +export interface V1Beta1GetTxsEventResponse { + /** txs is the list of queried transactions. */ + txs?: V1Beta1Tx[]; + + /** tx_responses is the list of queried TxResponses. */ + tx_responses?: V1Beta1TxResponse[]; + + /** + * pagination defines a pagination for the response. + * Deprecated post v0.46.x: use total instead. + */ + pagination?: V1Beta1PageResponse; + + /** + * total is total number of results available + * @format uint64 + */ + total?: string; +} + +/** + * ModeInfo describes the signing mode of a single or nested multisig signer. + */ +export interface V1Beta1ModeInfo { + /** single represents a single signer */ + single?: V1Beta1ModeInfoSingle; + + /** multi represents a nested multisig signer */ + multi?: V1Beta1ModeInfoMulti; +} + +export interface V1Beta1ModeInfoMulti { + /** + * bitarray specifies which keys within the multisig are signing + * CompactBitArray is an implementation of a space efficient bit array. + * This is used to ensure that the encoded data takes up a minimal amount of + * space after proto encoding. + * This is not thread safe, and is not intended for concurrent usage. 
+ */ + bitarray?: V1Beta1CompactBitArray; + + /** + * mode_infos is the corresponding modes of the signers of the multisig + * which could include nested multisig public keys + */ + mode_infos?: V1Beta1ModeInfo[]; +} + +export interface V1Beta1ModeInfoSingle { + /** + * mode is the signing mode of the single signer + * SignMode represents a signing mode with its own security guarantees. + * + * This enum should be considered a registry of all known sign modes + * in the Cosmos ecosystem. Apps are not expected to support all known + * sign modes. Apps that would like to support custom sign modes are + * encouraged to open a small PR against this file to add a new case + * to this SignMode enum describing their sign mode so that different + * apps have a consistent version of this enum. + * - SIGN_MODE_UNSPECIFIED: SIGN_MODE_UNSPECIFIED specifies an unknown signing mode and will be + * rejected. + * - SIGN_MODE_DIRECT: SIGN_MODE_DIRECT specifies a signing mode which uses SignDoc and is + * verified with raw bytes from Tx. + * - SIGN_MODE_TEXTUAL: SIGN_MODE_TEXTUAL is a future signing mode that will verify some + * human-readable textual representation on top of the binary representation + * from SIGN_MODE_DIRECT. It is currently not supported. + * - SIGN_MODE_DIRECT_AUX: SIGN_MODE_DIRECT_AUX specifies a signing mode which uses + * SignDocDirectAux. As opposed to SIGN_MODE_DIRECT, this sign mode does not + * require signers signing over other signers' `signer_info`. It also allows + * for adding Tips in transactions. + * Since: cosmos-sdk 0.46 + * - SIGN_MODE_LEGACY_AMINO_JSON: SIGN_MODE_LEGACY_AMINO_JSON is a backwards compatibility mode which uses + * Amino JSON and will be removed in the future. + * - SIGN_MODE_EIP_191: SIGN_MODE_EIP_191 specifies the sign mode for EIP 191 signing on the Cosmos + * SDK. 
Ref: https://eips.ethereum.org/EIPS/eip-191 + * Currently, SIGN_MODE_EIP_191 is registered as a SignMode enum variant, + * but is not implemented on the SDK by default. To enable EIP-191, you need + * to pass a custom `TxConfig` that has an implementation of + * `SignModeHandler` for EIP-191. The SDK may decide to fully support + * EIP-191 in the future. + * Since: cosmos-sdk 0.45.2 + */ + mode?: V1Beta1SignMode; +} + +/** +* - ORDER_BY_UNSPECIFIED: ORDER_BY_UNSPECIFIED specifies an unknown sorting order. OrderBy defaults to ASC in this case. + - ORDER_BY_ASC: ORDER_BY_ASC defines ascending order + - ORDER_BY_DESC: ORDER_BY_DESC defines descending order +*/ +export enum V1Beta1OrderBy { + ORDER_BY_UNSPECIFIED = "ORDER_BY_UNSPECIFIED", + ORDER_BY_ASC = "ORDER_BY_ASC", + ORDER_BY_DESC = "ORDER_BY_DESC", +} + +/** +* message SomeRequest { + Foo some_parameter = 1; + PageRequest pagination = 2; + } +*/ +export interface V1Beta1PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. + * @format byte + */ + key?: string; + + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. + * @format uint64 + */ + offset?: string; + + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. + * @format uint64 + */ + limit?: string; + + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. + */ + count_total?: boolean; + + /** + * reverse is set to true if results are to be returned in the descending order. 
+ * + * Since: cosmos-sdk 0.43 + */ + reverse?: boolean; +} + +/** +* PageResponse is to be embedded in gRPC response messages where the +corresponding request message has used PageRequest. + + message SomeResponse { + repeated Bar results = 1; + PageResponse page = 2; + } +*/ +export interface V1Beta1PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. + * @format byte + */ + next_key?: string; + + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + * @format uint64 + */ + total?: string; +} + +/** +* SignMode represents a signing mode with its own security guarantees. + +This enum should be considered a registry of all known sign modes +in the Cosmos ecosystem. Apps are not expected to support all known +sign modes. Apps that would like to support custom sign modes are +encouraged to open a small PR against this file to add a new case +to this SignMode enum describing their sign mode so that different +apps have a consistent version of this enum. + + - SIGN_MODE_UNSPECIFIED: SIGN_MODE_UNSPECIFIED specifies an unknown signing mode and will be +rejected. + - SIGN_MODE_DIRECT: SIGN_MODE_DIRECT specifies a signing mode which uses SignDoc and is +verified with raw bytes from Tx. + - SIGN_MODE_TEXTUAL: SIGN_MODE_TEXTUAL is a future signing mode that will verify some +human-readable textual representation on top of the binary representation +from SIGN_MODE_DIRECT. It is currently not supported. + - SIGN_MODE_DIRECT_AUX: SIGN_MODE_DIRECT_AUX specifies a signing mode which uses +SignDocDirectAux. As opposed to SIGN_MODE_DIRECT, this sign mode does not +require signers signing over other signers' `signer_info`. It also allows +for adding Tips in transactions. 
+ +Since: cosmos-sdk 0.46 + - SIGN_MODE_LEGACY_AMINO_JSON: SIGN_MODE_LEGACY_AMINO_JSON is a backwards compatibility mode which uses +Amino JSON and will be removed in the future. + - SIGN_MODE_EIP_191: SIGN_MODE_EIP_191 specifies the sign mode for EIP 191 signing on the Cosmos +SDK. Ref: https://eips.ethereum.org/EIPS/eip-191 + +Currently, SIGN_MODE_EIP_191 is registered as a SignMode enum variant, +but is not implemented on the SDK by default. To enable EIP-191, you need +to pass a custom `TxConfig` that has an implementation of +`SignModeHandler` for EIP-191. The SDK may decide to fully support +EIP-191 in the future. + +Since: cosmos-sdk 0.45.2 +*/ +export enum V1Beta1SignMode { + SIGN_MODE_UNSPECIFIED = "SIGN_MODE_UNSPECIFIED", + SIGN_MODE_DIRECT = "SIGN_MODE_DIRECT", + SIGN_MODE_TEXTUAL = "SIGN_MODE_TEXTUAL", + SIGN_MODE_DIRECT_AUX = "SIGN_MODE_DIRECT_AUX", + SIGN_MODE_LEGACY_AMINO_JSON = "SIGN_MODE_LEGACY_AMINO_JSON", + SIGNMODEEIP191 = "SIGN_MODE_EIP_191", +} + +/** +* SignerInfo describes the public key and signing mode of a single top-level +signer. +*/ +export interface V1Beta1SignerInfo { + /** + * public_key is the public key of the signer. It is optional for accounts + * that already exist in state. If unset, the verifier can use the required \ + * signer address for this position and lookup the public key. + */ + public_key?: ProtobufAny; + + /** + * mode_info describes the signing mode of the signer and is a nested + * structure to support nested multisig pubkey's + * ModeInfo describes the signing mode of a single or nested multisig signer. + */ + mode_info?: V1Beta1ModeInfo; + + /** + * sequence is the sequence of the account, which describes the + * number of committed transactions signed by a given address. It is used to + * prevent replay attacks. + * @format uint64 + */ + sequence?: string; +} + +/** +* SimulateRequest is the request type for the Service.Simulate +RPC method. 
+*/ +export interface V1Beta1SimulateRequest { + /** + * tx is the transaction to simulate. + * Deprecated. Send raw tx bytes instead. + */ + tx?: V1Beta1Tx; + + /** + * tx_bytes is the raw transaction. + * + * Since: cosmos-sdk 0.43 + * @format byte + */ + tx_bytes?: string; +} + +/** +* SimulateResponse is the response type for the +Service.SimulateRPC method. +*/ +export interface V1Beta1SimulateResponse { + /** gas_info is the information about gas used in the simulation. */ + gas_info?: V1Beta1GasInfo; + + /** result is the result of the simulation. */ + result?: Abciv1Beta1Result; +} + +/** +* StringEvent defines en Event object wrapper where all the attributes +contain key/value pairs that are strings instead of raw bytes. +*/ +export interface V1Beta1StringEvent { + type?: string; + attributes?: V1Beta1Attribute[]; +} + +/** +* Tip is the tip used for meta-transactions. + +Since: cosmos-sdk 0.46 +*/ +export interface V1Beta1Tip { + /** amount is the amount of the tip */ + amount?: V1Beta1Coin[]; + + /** tipper is the address of the account paying for the tip */ + tipper?: string; +} + +/** + * Tx is the standard type used for broadcasting transactions. + */ +export interface V1Beta1Tx { + /** + * body is the processable content of the transaction + * TxBody is the body of a transaction that all signers sign over. + */ + body?: V1Beta1TxBody; + + /** + * auth_info is the authorization related content of the transaction, + * specifically signers, signer modes and fee + * AuthInfo describes the fee and signer modes that are used to sign a + * transaction. + */ + auth_info?: V1Beta1AuthInfo; + + /** + * signatures is a list of signatures that matches the length and order of + * AuthInfo's signer_infos to allow connecting signature meta information like + * public key and signing mode by position. + */ + signatures?: string[]; +} + +/** + * TxBody is the body of a transaction that all signers sign over. 
+ */ +export interface V1Beta1TxBody { + /** + * messages is a list of messages to be executed. The required signers of + * those messages define the number and order of elements in AuthInfo's + * signer_infos and Tx's signatures. Each required signer address is added to + * the list only the first time it occurs. + * By convention, the first required signer (usually from the first message) + * is referred to as the primary signer and pays the fee for the whole + * transaction. + */ + messages?: ProtobufAny[]; + + /** + * memo is any arbitrary note/comment to be added to the transaction. + * WARNING: in clients, any publicly exposed text should not be called memo, + * but should be called `note` instead (see https://github.com/cosmos/cosmos-sdk/issues/9122). + */ + memo?: string; + + /** + * timeout is the block height after which this transaction will not + * be processed by the chain + * @format uint64 + */ + timeout_height?: string; + + /** + * extension_options are arbitrary options that can be added by chains + * when the default options are not sufficient. If any of these are present + * and can't be handled, the transaction will be rejected + */ + extension_options?: ProtobufAny[]; + + /** + * extension_options are arbitrary options that can be added by chains + * when the default options are not sufficient. If any of these are present + * and can't be handled, they will be ignored + */ + non_critical_extension_options?: ProtobufAny[]; +} + +/** +* TxDecodeAminoRequest is the request type for the Service.TxDecodeAmino +RPC method. + +Since: cosmos-sdk 0.47 +*/ +export interface V1Beta1TxDecodeAminoRequest { + /** @format byte */ + amino_binary?: string; +} + +/** +* TxDecodeAminoResponse is the response type for the Service.TxDecodeAmino +RPC method. + +Since: cosmos-sdk 0.47 +*/ +export interface V1Beta1TxDecodeAminoResponse { + amino_json?: string; +} + +/** +* TxDecodeRequest is the request type for the Service.TxDecode +RPC method. 
+ +Since: cosmos-sdk 0.47 +*/ +export interface V1Beta1TxDecodeRequest { + /** + * tx_bytes is the raw transaction. + * @format byte + */ + tx_bytes?: string; +} + +/** +* TxDecodeResponse is the response type for the +Service.TxDecode method. + +Since: cosmos-sdk 0.47 +*/ +export interface V1Beta1TxDecodeResponse { + /** tx is the decoded transaction. */ + tx?: V1Beta1Tx; +} + +/** +* TxEncodeAminoRequest is the request type for the Service.TxEncodeAmino +RPC method. + +Since: cosmos-sdk 0.47 +*/ +export interface V1Beta1TxEncodeAminoRequest { + amino_json?: string; +} + +/** +* TxEncodeAminoResponse is the response type for the Service.TxEncodeAmino +RPC method. + +Since: cosmos-sdk 0.47 +*/ +export interface V1Beta1TxEncodeAminoResponse { + /** @format byte */ + amino_binary?: string; +} + +/** +* TxEncodeRequest is the request type for the Service.TxEncode +RPC method. + +Since: cosmos-sdk 0.47 +*/ +export interface V1Beta1TxEncodeRequest { + /** tx is the transaction to encode. */ + tx?: V1Beta1Tx; +} + +/** +* TxEncodeResponse is the response type for the +Service.TxEncode method. + +Since: cosmos-sdk 0.47 +*/ +export interface V1Beta1TxEncodeResponse { + /** + * tx_bytes is the encoded transaction bytes. + * @format byte + */ + tx_bytes?: string; +} + +/** +* TxResponse defines a structure containing relevant tx data and metadata. The +tags are stringified and the log is JSON decoded. +*/ +export interface V1Beta1TxResponse { + /** + * The block height + * @format int64 + */ + height?: string; + + /** The transaction hash. */ + txhash?: string; + + /** Namespace for the Code */ + codespace?: string; + + /** + * Response code. + * @format int64 + */ + code?: number; + + /** Result bytes, if any. */ + data?: string; + + /** + * The output of the application's logger (raw string). May be + * non-deterministic. + */ + raw_log?: string; + + /** The output of the application's logger (typed). May be non-deterministic. 
*/ + logs?: V1Beta1ABCIMessageLog[]; + + /** Additional information. May be non-deterministic. */ + info?: string; + + /** + * Amount of gas requested for transaction. + * @format int64 + */ + gas_wanted?: string; + + /** + * Amount of gas consumed by transaction. + * @format int64 + */ + gas_used?: string; + + /** The request transaction bytes. */ + tx?: ProtobufAny; + + /** + * Time of the previous block. For heights > 1, it's the weighted median of + * the timestamps of the valid votes in the block.LastCommit. For height == 1, + * it's genesis time. + */ + timestamp?: string; + + /** + * Events defines all the events emitted by processing a transaction. Note, + * these events include those emitted by processing all the messages and those + * emitted from the ante. Whereas Logs contains the events, with + * additional metadata, emitted only by processing the messages. + * + * Since: cosmos-sdk 0.42.11, 0.44.5, 0.45 + */ + events?: AbciEvent[]; +} + +/** +* Consensus captures the consensus rules for processing a block in the blockchain, +including all blockchain data structures and the rules of the application's +state transition machine. +*/ +export interface VersionConsensus { + /** @format uint64 */ + block?: string; + + /** @format uint64 */ + app?: string; +} + +import axios, { AxiosInstance, AxiosRequestConfig, AxiosResponse, ResponseType } from "axios"; + +export type QueryParamsType = Record; + +export interface FullRequestParams extends Omit { + /** set parameter to `true` for call `securityWorker` for this request */ + secure?: boolean; + /** request path */ + path: string; + /** content type of request body */ + type?: ContentType; + /** query params */ + query?: QueryParamsType; + /** format of response (i.e. 
response.json() -> format: "json") */ + format?: ResponseType; + /** request body */ + body?: unknown; +} + +export type RequestParams = Omit; + +export interface ApiConfig extends Omit { + securityWorker?: ( + securityData: SecurityDataType | null, + ) => Promise | AxiosRequestConfig | void; + secure?: boolean; + format?: ResponseType; +} + +export enum ContentType { + Json = "application/json", + FormData = "multipart/form-data", + UrlEncoded = "application/x-www-form-urlencoded", +} + +export class HttpClient { + public instance: AxiosInstance; + private securityData: SecurityDataType | null = null; + private securityWorker?: ApiConfig["securityWorker"]; + private secure?: boolean; + private format?: ResponseType; + + constructor({ securityWorker, secure, format, ...axiosConfig }: ApiConfig = {}) { + this.instance = axios.create({ ...axiosConfig, baseURL: axiosConfig.baseURL || "" }); + this.secure = secure; + this.format = format; + this.securityWorker = securityWorker; + } + + public setSecurityData = (data: SecurityDataType | null) => { + this.securityData = data; + }; + + private mergeRequestParams(params1: AxiosRequestConfig, params2?: AxiosRequestConfig): AxiosRequestConfig { + return { + ...this.instance.defaults, + ...params1, + ...(params2 || {}), + headers: { + ...(this.instance.defaults.headers || {}), + ...(params1.headers || {}), + ...((params2 && params2.headers) || {}), + }, + }; + } + + private createFormData(input: Record): FormData { + return Object.keys(input || {}).reduce((formData, key) => { + const property = input[key]; + formData.append( + key, + property instanceof Blob + ? property + : typeof property === "object" && property !== null + ? JSON.stringify(property) + : `${property}`, + ); + return formData; + }, new FormData()); + } + + public request = async ({ + secure, + path, + type, + query, + format, + body, + ...params + }: FullRequestParams): Promise> => { + const secureParams = + ((typeof secure === "boolean" ? 
secure : this.secure) && + this.securityWorker && + (await this.securityWorker(this.securityData))) || + {}; + const requestParams = this.mergeRequestParams(params, secureParams); + const responseFormat = (format && this.format) || void 0; + + if (type === ContentType.FormData && body && body !== null && typeof body === "object") { + requestParams.headers.common = { Accept: "*/*" }; + requestParams.headers.post = {}; + requestParams.headers.put = {}; + + body = this.createFormData(body as Record); + } + + return this.instance.request({ + ...requestParams, + headers: { + ...(type && type !== ContentType.FormData ? { "Content-Type": type } : {}), + ...(requestParams.headers || {}), + }, + params: query, + responseType: responseFormat, + data: body, + url: path, + }); + }; +} + +/** + * @title cosmos/tx/v1beta1/service.proto + * @version version not set + */ +export class Api extends HttpClient { + /** + * @description Since: cosmos-sdk 0.47 + * + * @tags Service + * @name ServiceTxDecode + * @summary TxDecode decodes the transaction. + * @request POST:/cosmos/tx/v1beta1/decode + */ + serviceTxDecode = (body: V1Beta1TxDecodeRequest, params: RequestParams = {}) => + this.request({ + path: `/cosmos/tx/v1beta1/decode`, + method: "POST", + body: body, + type: ContentType.Json, + format: "json", + ...params, + }); + + /** + * @description Since: cosmos-sdk 0.47 + * + * @tags Service + * @name ServiceTxDecodeAmino + * @summary TxDecodeAmino decodes an Amino transaction from encoded bytes to JSON. + * @request POST:/cosmos/tx/v1beta1/decode/amino + */ + serviceTxDecodeAmino = (body: V1Beta1TxDecodeAminoRequest, params: RequestParams = {}) => + this.request({ + path: `/cosmos/tx/v1beta1/decode/amino`, + method: "POST", + body: body, + type: ContentType.Json, + format: "json", + ...params, + }); + + /** + * @description Since: cosmos-sdk 0.47 + * + * @tags Service + * @name ServiceTxEncode + * @summary TxEncode encodes the transaction. 
+ * @request POST:/cosmos/tx/v1beta1/encode + */ + serviceTxEncode = (body: V1Beta1TxEncodeRequest, params: RequestParams = {}) => + this.request({ + path: `/cosmos/tx/v1beta1/encode`, + method: "POST", + body: body, + type: ContentType.Json, + format: "json", + ...params, + }); + + /** + * @description Since: cosmos-sdk 0.47 + * + * @tags Service + * @name ServiceTxEncodeAmino + * @summary TxEncodeAmino encodes an Amino transaction from JSON to encoded bytes. + * @request POST:/cosmos/tx/v1beta1/encode/amino + */ + serviceTxEncodeAmino = (body: V1Beta1TxEncodeAminoRequest, params: RequestParams = {}) => + this.request({ + path: `/cosmos/tx/v1beta1/encode/amino`, + method: "POST", + body: body, + type: ContentType.Json, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Service + * @name ServiceSimulate + * @summary Simulate simulates executing a transaction for estimating gas usage. + * @request POST:/cosmos/tx/v1beta1/simulate + */ + serviceSimulate = (body: V1Beta1SimulateRequest, params: RequestParams = {}) => + this.request({ + path: `/cosmos/tx/v1beta1/simulate`, + method: "POST", + body: body, + type: ContentType.Json, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Service + * @name ServiceGetTxsEvent + * @summary GetTxsEvent fetches txs by event. + * @request GET:/cosmos/tx/v1beta1/txs + */ + serviceGetTxsEvent = ( + query?: { + events?: string[]; + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + order_by?: "ORDER_BY_UNSPECIFIED" | "ORDER_BY_ASC" | "ORDER_BY_DESC"; + page?: string; + limit?: string; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/cosmos/tx/v1beta1/txs`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Service + * @name ServiceBroadcastTx + * @summary BroadcastTx broadcast transaction. 
+ * @request POST:/cosmos/tx/v1beta1/txs + */ + serviceBroadcastTx = (body: V1Beta1BroadcastTxRequest, params: RequestParams = {}) => + this.request({ + path: `/cosmos/tx/v1beta1/txs`, + method: "POST", + body: body, + type: ContentType.Json, + format: "json", + ...params, + }); + + /** + * @description Since: cosmos-sdk 0.45.2 + * + * @tags Service + * @name ServiceGetBlockWithTxs + * @summary GetBlockWithTxs fetches a block with decoded txs. + * @request GET:/cosmos/tx/v1beta1/txs/block/{height} + */ + serviceGetBlockWithTxs = ( + height: string, + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/cosmos/tx/v1beta1/txs/block/${height}`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Service + * @name ServiceGetTx + * @summary GetTx fetches a tx by hash. 
+ * @request GET:/cosmos/tx/v1beta1/txs/{hash} + */ + serviceGetTx = (hash: string, params: RequestParams = {}) => + this.request({ + path: `/cosmos/tx/v1beta1/txs/${hash}`, + method: "GET", + format: "json", + ...params, + }); +} diff --git a/ts-client/cosmos.tx.v1beta1/types.ts b/ts-client/cosmos.tx.v1beta1/types.ts new file mode 100755 index 000000000..b90ba7830 --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types.ts @@ -0,0 +1,31 @@ +import { Tx } from "./types/cosmos/tx/v1beta1/tx" +import { TxRaw } from "./types/cosmos/tx/v1beta1/tx" +import { SignDoc } from "./types/cosmos/tx/v1beta1/tx" +import { SignDocDirectAux } from "./types/cosmos/tx/v1beta1/tx" +import { TxBody } from "./types/cosmos/tx/v1beta1/tx" +import { AuthInfo } from "./types/cosmos/tx/v1beta1/tx" +import { SignerInfo } from "./types/cosmos/tx/v1beta1/tx" +import { ModeInfo } from "./types/cosmos/tx/v1beta1/tx" +import { ModeInfo_Single } from "./types/cosmos/tx/v1beta1/tx" +import { ModeInfo_Multi } from "./types/cosmos/tx/v1beta1/tx" +import { Fee } from "./types/cosmos/tx/v1beta1/tx" +import { Tip } from "./types/cosmos/tx/v1beta1/tx" +import { AuxSignerData } from "./types/cosmos/tx/v1beta1/tx" + + +export { + Tx, + TxRaw, + SignDoc, + SignDocDirectAux, + TxBody, + AuthInfo, + SignerInfo, + ModeInfo, + ModeInfo_Single, + ModeInfo_Multi, + Fee, + Tip, + AuxSignerData, + + } \ No newline at end of file diff --git a/ts-client/cosmos.tx.v1beta1/types/amino/amino.ts b/ts-client/cosmos.tx.v1beta1/types/amino/amino.ts new file mode 100644 index 000000000..4b67dcfe4 --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/amino/amino.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "amino"; diff --git a/ts-client/cosmos.tx.v1beta1/types/cosmos/base/abci/v1beta1/abci.ts b/ts-client/cosmos.tx.v1beta1/types/cosmos/base/abci/v1beta1/abci.ts new file mode 100644 index 000000000..b147306d8 --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/cosmos/base/abci/v1beta1/abci.ts @@ -0,0 
+1,1040 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { Any } from "../../../../google/protobuf/any"; +import { Event } from "../../../../tendermint/abci/types"; + +export const protobufPackage = "cosmos.base.abci.v1beta1"; + +/** + * TxResponse defines a structure containing relevant tx data and metadata. The + * tags are stringified and the log is JSON decoded. + */ +export interface TxResponse { + /** The block height */ + height: number; + /** The transaction hash. */ + txhash: string; + /** Namespace for the Code */ + codespace: string; + /** Response code. */ + code: number; + /** Result bytes, if any. */ + data: string; + /** + * The output of the application's logger (raw string). May be + * non-deterministic. + */ + rawLog: string; + /** The output of the application's logger (typed). May be non-deterministic. */ + logs: ABCIMessageLog[]; + /** Additional information. May be non-deterministic. */ + info: string; + /** Amount of gas requested for transaction. */ + gasWanted: number; + /** Amount of gas consumed by transaction. */ + gasUsed: number; + /** The request transaction bytes. */ + tx: + | Any + | undefined; + /** + * Time of the previous block. For heights > 1, it's the weighted median of + * the timestamps of the valid votes in the block.LastCommit. For height == 1, + * it's genesis time. + */ + timestamp: string; + /** + * Events defines all the events emitted by processing a transaction. Note, + * these events include those emitted by processing all the messages and those + * emitted from the ante. Whereas Logs contains the events, with + * additional metadata, emitted only by processing the messages. + * + * Since: cosmos-sdk 0.42.11, 0.44.5, 0.45 + */ + events: Event[]; +} + +/** ABCIMessageLog defines a structure containing an indexed tx ABCI message log. 
*/ +export interface ABCIMessageLog { + msgIndex: number; + log: string; + /** + * Events contains a slice of Event objects that were emitted during some + * execution. + */ + events: StringEvent[]; +} + +/** + * StringEvent defines en Event object wrapper where all the attributes + * contain key/value pairs that are strings instead of raw bytes. + */ +export interface StringEvent { + type: string; + attributes: Attribute[]; +} + +/** + * Attribute defines an attribute wrapper where the key and value are + * strings instead of raw bytes. + */ +export interface Attribute { + key: string; + value: string; +} + +/** GasInfo defines tx execution gas context. */ +export interface GasInfo { + /** GasWanted is the maximum units of work we allow this tx to perform. */ + gasWanted: number; + /** GasUsed is the amount of gas actually consumed. */ + gasUsed: number; +} + +/** Result is the union of ResponseFormat and ResponseCheckTx. */ +export interface Result { + /** + * Data is any data returned from message or handler execution. It MUST be + * length prefixed in order to separate data from multiple message executions. + * Deprecated. This field is still populated, but prefer msg_response instead + * because it also contains the Msg response typeURL. + * + * @deprecated + */ + data: Uint8Array; + /** Log contains the log information from message or handler execution. */ + log: string; + /** + * Events contains a slice of Event objects that were emitted during message + * or handler execution. + */ + events: Event[]; + /** + * msg_responses contains the Msg handler responses type packed in Anys. + * + * Since: cosmos-sdk 0.46 + */ + msgResponses: Any[]; +} + +/** + * SimulationResponse defines the response generated when a transaction is + * successfully simulated. + */ +export interface SimulationResponse { + gasInfo: GasInfo | undefined; + result: Result | undefined; +} + +/** + * MsgData defines the data returned in a Result object during message + * execution. 
+ * + * @deprecated + */ +export interface MsgData { + msgType: string; + data: Uint8Array; +} + +/** + * TxMsgData defines a list of MsgData. A transaction will have a MsgData object + * for each message. + */ +export interface TxMsgData { + /** + * data field is deprecated and not populated. + * + * @deprecated + */ + data: MsgData[]; + /** + * msg_responses contains the Msg handler responses packed into Anys. + * + * Since: cosmos-sdk 0.46 + */ + msgResponses: Any[]; +} + +/** SearchTxsResult defines a structure for querying txs pageable */ +export interface SearchTxsResult { + /** Count of all txs */ + totalCount: number; + /** Count of txs in current page */ + count: number; + /** Index of current page, start from 1 */ + pageNumber: number; + /** Count of total pages */ + pageTotal: number; + /** Max count txs per page */ + limit: number; + /** List of txs in current page */ + txs: TxResponse[]; +} + +function createBaseTxResponse(): TxResponse { + return { + height: 0, + txhash: "", + codespace: "", + code: 0, + data: "", + rawLog: "", + logs: [], + info: "", + gasWanted: 0, + gasUsed: 0, + tx: undefined, + timestamp: "", + events: [], + }; +} + +export const TxResponse = { + encode(message: TxResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.height !== 0) { + writer.uint32(8).int64(message.height); + } + if (message.txhash !== "") { + writer.uint32(18).string(message.txhash); + } + if (message.codespace !== "") { + writer.uint32(26).string(message.codespace); + } + if (message.code !== 0) { + writer.uint32(32).uint32(message.code); + } + if (message.data !== "") { + writer.uint32(42).string(message.data); + } + if (message.rawLog !== "") { + writer.uint32(50).string(message.rawLog); + } + for (const v of message.logs) { + ABCIMessageLog.encode(v!, writer.uint32(58).fork()).ldelim(); + } + if (message.info !== "") { + writer.uint32(66).string(message.info); + } + if (message.gasWanted !== 0) { + 
writer.uint32(72).int64(message.gasWanted); + } + if (message.gasUsed !== 0) { + writer.uint32(80).int64(message.gasUsed); + } + if (message.tx !== undefined) { + Any.encode(message.tx, writer.uint32(90).fork()).ldelim(); + } + if (message.timestamp !== "") { + writer.uint32(98).string(message.timestamp); + } + for (const v of message.events) { + Event.encode(v!, writer.uint32(106).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TxResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTxResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.height = longToNumber(reader.int64() as Long); + break; + case 2: + message.txhash = reader.string(); + break; + case 3: + message.codespace = reader.string(); + break; + case 4: + message.code = reader.uint32(); + break; + case 5: + message.data = reader.string(); + break; + case 6: + message.rawLog = reader.string(); + break; + case 7: + message.logs.push(ABCIMessageLog.decode(reader, reader.uint32())); + break; + case 8: + message.info = reader.string(); + break; + case 9: + message.gasWanted = longToNumber(reader.int64() as Long); + break; + case 10: + message.gasUsed = longToNumber(reader.int64() as Long); + break; + case 11: + message.tx = Any.decode(reader, reader.uint32()); + break; + case 12: + message.timestamp = reader.string(); + break; + case 13: + message.events.push(Event.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): TxResponse { + return { + height: isSet(object.height) ? Number(object.height) : 0, + txhash: isSet(object.txhash) ? String(object.txhash) : "", + codespace: isSet(object.codespace) ? String(object.codespace) : "", + code: isSet(object.code) ? 
Number(object.code) : 0, + data: isSet(object.data) ? String(object.data) : "", + rawLog: isSet(object.rawLog) ? String(object.rawLog) : "", + logs: Array.isArray(object?.logs) ? object.logs.map((e: any) => ABCIMessageLog.fromJSON(e)) : [], + info: isSet(object.info) ? String(object.info) : "", + gasWanted: isSet(object.gasWanted) ? Number(object.gasWanted) : 0, + gasUsed: isSet(object.gasUsed) ? Number(object.gasUsed) : 0, + tx: isSet(object.tx) ? Any.fromJSON(object.tx) : undefined, + timestamp: isSet(object.timestamp) ? String(object.timestamp) : "", + events: Array.isArray(object?.events) ? object.events.map((e: any) => Event.fromJSON(e)) : [], + }; + }, + + toJSON(message: TxResponse): unknown { + const obj: any = {}; + message.height !== undefined && (obj.height = Math.round(message.height)); + message.txhash !== undefined && (obj.txhash = message.txhash); + message.codespace !== undefined && (obj.codespace = message.codespace); + message.code !== undefined && (obj.code = Math.round(message.code)); + message.data !== undefined && (obj.data = message.data); + message.rawLog !== undefined && (obj.rawLog = message.rawLog); + if (message.logs) { + obj.logs = message.logs.map((e) => e ? ABCIMessageLog.toJSON(e) : undefined); + } else { + obj.logs = []; + } + message.info !== undefined && (obj.info = message.info); + message.gasWanted !== undefined && (obj.gasWanted = Math.round(message.gasWanted)); + message.gasUsed !== undefined && (obj.gasUsed = Math.round(message.gasUsed)); + message.tx !== undefined && (obj.tx = message.tx ? Any.toJSON(message.tx) : undefined); + message.timestamp !== undefined && (obj.timestamp = message.timestamp); + if (message.events) { + obj.events = message.events.map((e) => e ? Event.toJSON(e) : undefined); + } else { + obj.events = []; + } + return obj; + }, + + fromPartial, I>>(object: I): TxResponse { + const message = createBaseTxResponse(); + message.height = object.height ?? 0; + message.txhash = object.txhash ?? 
""; + message.codespace = object.codespace ?? ""; + message.code = object.code ?? 0; + message.data = object.data ?? ""; + message.rawLog = object.rawLog ?? ""; + message.logs = object.logs?.map((e) => ABCIMessageLog.fromPartial(e)) || []; + message.info = object.info ?? ""; + message.gasWanted = object.gasWanted ?? 0; + message.gasUsed = object.gasUsed ?? 0; + message.tx = (object.tx !== undefined && object.tx !== null) ? Any.fromPartial(object.tx) : undefined; + message.timestamp = object.timestamp ?? ""; + message.events = object.events?.map((e) => Event.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseABCIMessageLog(): ABCIMessageLog { + return { msgIndex: 0, log: "", events: [] }; +} + +export const ABCIMessageLog = { + encode(message: ABCIMessageLog, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.msgIndex !== 0) { + writer.uint32(8).uint32(message.msgIndex); + } + if (message.log !== "") { + writer.uint32(18).string(message.log); + } + for (const v of message.events) { + StringEvent.encode(v!, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ABCIMessageLog { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseABCIMessageLog(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.msgIndex = reader.uint32(); + break; + case 2: + message.log = reader.string(); + break; + case 3: + message.events.push(StringEvent.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ABCIMessageLog { + return { + msgIndex: isSet(object.msgIndex) ? Number(object.msgIndex) : 0, + log: isSet(object.log) ? String(object.log) : "", + events: Array.isArray(object?.events) ? 
object.events.map((e: any) => StringEvent.fromJSON(e)) : [], + }; + }, + + toJSON(message: ABCIMessageLog): unknown { + const obj: any = {}; + message.msgIndex !== undefined && (obj.msgIndex = Math.round(message.msgIndex)); + message.log !== undefined && (obj.log = message.log); + if (message.events) { + obj.events = message.events.map((e) => e ? StringEvent.toJSON(e) : undefined); + } else { + obj.events = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ABCIMessageLog { + const message = createBaseABCIMessageLog(); + message.msgIndex = object.msgIndex ?? 0; + message.log = object.log ?? ""; + message.events = object.events?.map((e) => StringEvent.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseStringEvent(): StringEvent { + return { type: "", attributes: [] }; +} + +export const StringEvent = { + encode(message: StringEvent, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.type !== "") { + writer.uint32(10).string(message.type); + } + for (const v of message.attributes) { + Attribute.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): StringEvent { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseStringEvent(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.type = reader.string(); + break; + case 2: + message.attributes.push(Attribute.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): StringEvent { + return { + type: isSet(object.type) ? String(object.type) : "", + attributes: Array.isArray(object?.attributes) ? 
object.attributes.map((e: any) => Attribute.fromJSON(e)) : [], + }; + }, + + toJSON(message: StringEvent): unknown { + const obj: any = {}; + message.type !== undefined && (obj.type = message.type); + if (message.attributes) { + obj.attributes = message.attributes.map((e) => e ? Attribute.toJSON(e) : undefined); + } else { + obj.attributes = []; + } + return obj; + }, + + fromPartial, I>>(object: I): StringEvent { + const message = createBaseStringEvent(); + message.type = object.type ?? ""; + message.attributes = object.attributes?.map((e) => Attribute.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseAttribute(): Attribute { + return { key: "", value: "" }; +} + +export const Attribute = { + encode(message: Attribute, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== "") { + writer.uint32(18).string(message.value); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Attribute { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAttribute(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.key = reader.string(); + break; + case 2: + message.value = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Attribute { + return { key: isSet(object.key) ? String(object.key) : "", value: isSet(object.value) ? String(object.value) : "" }; + }, + + toJSON(message: Attribute): unknown { + const obj: any = {}; + message.key !== undefined && (obj.key = message.key); + message.value !== undefined && (obj.value = message.value); + return obj; + }, + + fromPartial, I>>(object: I): Attribute { + const message = createBaseAttribute(); + message.key = object.key ?? 
""; + message.value = object.value ?? ""; + return message; + }, +}; + +function createBaseGasInfo(): GasInfo { + return { gasWanted: 0, gasUsed: 0 }; +} + +export const GasInfo = { + encode(message: GasInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.gasWanted !== 0) { + writer.uint32(8).uint64(message.gasWanted); + } + if (message.gasUsed !== 0) { + writer.uint32(16).uint64(message.gasUsed); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GasInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGasInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.gasWanted = longToNumber(reader.uint64() as Long); + break; + case 2: + message.gasUsed = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GasInfo { + return { + gasWanted: isSet(object.gasWanted) ? Number(object.gasWanted) : 0, + gasUsed: isSet(object.gasUsed) ? Number(object.gasUsed) : 0, + }; + }, + + toJSON(message: GasInfo): unknown { + const obj: any = {}; + message.gasWanted !== undefined && (obj.gasWanted = Math.round(message.gasWanted)); + message.gasUsed !== undefined && (obj.gasUsed = Math.round(message.gasUsed)); + return obj; + }, + + fromPartial, I>>(object: I): GasInfo { + const message = createBaseGasInfo(); + message.gasWanted = object.gasWanted ?? 0; + message.gasUsed = object.gasUsed ?? 
0; + return message; + }, +}; + +function createBaseResult(): Result { + return { data: new Uint8Array(), log: "", events: [], msgResponses: [] }; +} + +export const Result = { + encode(message: Result, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.data.length !== 0) { + writer.uint32(10).bytes(message.data); + } + if (message.log !== "") { + writer.uint32(18).string(message.log); + } + for (const v of message.events) { + Event.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.msgResponses) { + Any.encode(v!, writer.uint32(34).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Result { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResult(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.data = reader.bytes(); + break; + case 2: + message.log = reader.string(); + break; + case 3: + message.events.push(Event.decode(reader, reader.uint32())); + break; + case 4: + message.msgResponses.push(Any.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Result { + return { + data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(), + log: isSet(object.log) ? String(object.log) : "", + events: Array.isArray(object?.events) ? object.events.map((e: any) => Event.fromJSON(e)) : [], + msgResponses: Array.isArray(object?.msgResponses) ? object.msgResponses.map((e: any) => Any.fromJSON(e)) : [], + }; + }, + + toJSON(message: Result): unknown { + const obj: any = {}; + message.data !== undefined + && (obj.data = base64FromBytes(message.data !== undefined ? 
message.data : new Uint8Array())); + message.log !== undefined && (obj.log = message.log); + if (message.events) { + obj.events = message.events.map((e) => e ? Event.toJSON(e) : undefined); + } else { + obj.events = []; + } + if (message.msgResponses) { + obj.msgResponses = message.msgResponses.map((e) => e ? Any.toJSON(e) : undefined); + } else { + obj.msgResponses = []; + } + return obj; + }, + + fromPartial, I>>(object: I): Result { + const message = createBaseResult(); + message.data = object.data ?? new Uint8Array(); + message.log = object.log ?? ""; + message.events = object.events?.map((e) => Event.fromPartial(e)) || []; + message.msgResponses = object.msgResponses?.map((e) => Any.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSimulationResponse(): SimulationResponse { + return { gasInfo: undefined, result: undefined }; +} + +export const SimulationResponse = { + encode(message: SimulationResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.gasInfo !== undefined) { + GasInfo.encode(message.gasInfo, writer.uint32(10).fork()).ldelim(); + } + if (message.result !== undefined) { + Result.encode(message.result, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SimulationResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSimulationResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.gasInfo = GasInfo.decode(reader, reader.uint32()); + break; + case 2: + message.result = Result.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SimulationResponse { + return { + gasInfo: isSet(object.gasInfo) ? 
GasInfo.fromJSON(object.gasInfo) : undefined, + result: isSet(object.result) ? Result.fromJSON(object.result) : undefined, + }; + }, + + toJSON(message: SimulationResponse): unknown { + const obj: any = {}; + message.gasInfo !== undefined && (obj.gasInfo = message.gasInfo ? GasInfo.toJSON(message.gasInfo) : undefined); + message.result !== undefined && (obj.result = message.result ? Result.toJSON(message.result) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): SimulationResponse { + const message = createBaseSimulationResponse(); + message.gasInfo = (object.gasInfo !== undefined && object.gasInfo !== null) + ? GasInfo.fromPartial(object.gasInfo) + : undefined; + message.result = (object.result !== undefined && object.result !== null) + ? Result.fromPartial(object.result) + : undefined; + return message; + }, +}; + +function createBaseMsgData(): MsgData { + return { msgType: "", data: new Uint8Array() }; +} + +export const MsgData = { + encode(message: MsgData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.msgType !== "") { + writer.uint32(10).string(message.msgType); + } + if (message.data.length !== 0) { + writer.uint32(18).bytes(message.data); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgData(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.msgType = reader.string(); + break; + case 2: + message.data = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgData { + return { + msgType: isSet(object.msgType) ? String(object.msgType) : "", + data: isSet(object.data) ? 
bytesFromBase64(object.data) : new Uint8Array(), + }; + }, + + toJSON(message: MsgData): unknown { + const obj: any = {}; + message.msgType !== undefined && (obj.msgType = message.msgType); + message.data !== undefined + && (obj.data = base64FromBytes(message.data !== undefined ? message.data : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): MsgData { + const message = createBaseMsgData(); + message.msgType = object.msgType ?? ""; + message.data = object.data ?? new Uint8Array(); + return message; + }, +}; + +function createBaseTxMsgData(): TxMsgData { + return { data: [], msgResponses: [] }; +} + +export const TxMsgData = { + encode(message: TxMsgData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.data) { + MsgData.encode(v!, writer.uint32(10).fork()).ldelim(); + } + for (const v of message.msgResponses) { + Any.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TxMsgData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTxMsgData(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.data.push(MsgData.decode(reader, reader.uint32())); + break; + case 2: + message.msgResponses.push(Any.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): TxMsgData { + return { + data: Array.isArray(object?.data) ? object.data.map((e: any) => MsgData.fromJSON(e)) : [], + msgResponses: Array.isArray(object?.msgResponses) ? object.msgResponses.map((e: any) => Any.fromJSON(e)) : [], + }; + }, + + toJSON(message: TxMsgData): unknown { + const obj: any = {}; + if (message.data) { + obj.data = message.data.map((e) => e ? 
MsgData.toJSON(e) : undefined); + } else { + obj.data = []; + } + if (message.msgResponses) { + obj.msgResponses = message.msgResponses.map((e) => e ? Any.toJSON(e) : undefined); + } else { + obj.msgResponses = []; + } + return obj; + }, + + fromPartial, I>>(object: I): TxMsgData { + const message = createBaseTxMsgData(); + message.data = object.data?.map((e) => MsgData.fromPartial(e)) || []; + message.msgResponses = object.msgResponses?.map((e) => Any.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSearchTxsResult(): SearchTxsResult { + return { totalCount: 0, count: 0, pageNumber: 0, pageTotal: 0, limit: 0, txs: [] }; +} + +export const SearchTxsResult = { + encode(message: SearchTxsResult, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.totalCount !== 0) { + writer.uint32(8).uint64(message.totalCount); + } + if (message.count !== 0) { + writer.uint32(16).uint64(message.count); + } + if (message.pageNumber !== 0) { + writer.uint32(24).uint64(message.pageNumber); + } + if (message.pageTotal !== 0) { + writer.uint32(32).uint64(message.pageTotal); + } + if (message.limit !== 0) { + writer.uint32(40).uint64(message.limit); + } + for (const v of message.txs) { + TxResponse.encode(v!, writer.uint32(50).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SearchTxsResult { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSearchTxsResult(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.totalCount = longToNumber(reader.uint64() as Long); + break; + case 2: + message.count = longToNumber(reader.uint64() as Long); + break; + case 3: + message.pageNumber = longToNumber(reader.uint64() as Long); + break; + case 4: + message.pageTotal = longToNumber(reader.uint64() as Long); + break; + case 5: + message.limit = longToNumber(reader.uint64() as Long); + break; + case 6: + message.txs.push(TxResponse.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SearchTxsResult { + return { + totalCount: isSet(object.totalCount) ? Number(object.totalCount) : 0, + count: isSet(object.count) ? Number(object.count) : 0, + pageNumber: isSet(object.pageNumber) ? Number(object.pageNumber) : 0, + pageTotal: isSet(object.pageTotal) ? Number(object.pageTotal) : 0, + limit: isSet(object.limit) ? Number(object.limit) : 0, + txs: Array.isArray(object?.txs) ? object.txs.map((e: any) => TxResponse.fromJSON(e)) : [], + }; + }, + + toJSON(message: SearchTxsResult): unknown { + const obj: any = {}; + message.totalCount !== undefined && (obj.totalCount = Math.round(message.totalCount)); + message.count !== undefined && (obj.count = Math.round(message.count)); + message.pageNumber !== undefined && (obj.pageNumber = Math.round(message.pageNumber)); + message.pageTotal !== undefined && (obj.pageTotal = Math.round(message.pageTotal)); + message.limit !== undefined && (obj.limit = Math.round(message.limit)); + if (message.txs) { + obj.txs = message.txs.map((e) => e ? TxResponse.toJSON(e) : undefined); + } else { + obj.txs = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SearchTxsResult { + const message = createBaseSearchTxsResult(); + message.totalCount = object.totalCount ?? 
0; + message.count = object.count ?? 0; + message.pageNumber = object.pageNumber ?? 0; + message.pageTotal = object.pageTotal ?? 0; + message.limit = object.limit ?? 0; + message.txs = object.txs?.map((e) => TxResponse.fromPartial(e)) || []; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.tx.v1beta1/types/cosmos/base/query/v1beta1/pagination.ts b/ts-client/cosmos.tx.v1beta1/types/cosmos/base/query/v1beta1/pagination.ts new file mode 100644 index 000000000..a31ca249d --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/cosmos/base/query/v1beta1/pagination.ts @@ -0,0 +1,286 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos.base.query.v1beta1"; + +/** + * PageRequest is to be embedded in gRPC request messages for efficient + * pagination. Ex: + * + * message SomeRequest { + * Foo some_parameter = 1; + * PageRequest pagination = 2; + * } + */ +export interface PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. + */ + key: Uint8Array; + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. + */ + offset: number; + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. + */ + limit: number; + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. 
+ */ + countTotal: boolean; + /** + * reverse is set to true if results are to be returned in the descending order. + * + * Since: cosmos-sdk 0.43 + */ + reverse: boolean; +} + +/** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ +export interface PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. + */ + nextKey: Uint8Array; + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + */ + total: number; +} + +function createBasePageRequest(): PageRequest { + return { key: new Uint8Array(), offset: 0, limit: 0, countTotal: false, reverse: false }; +} + +export const PageRequest = { + encode(message: PageRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + if (message.offset !== 0) { + writer.uint32(16).uint64(message.offset); + } + if (message.limit !== 0) { + writer.uint32(24).uint64(message.limit); + } + if (message.countTotal === true) { + writer.uint32(32).bool(message.countTotal); + } + if (message.reverse === true) { + writer.uint32(40).bool(message.reverse); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PageRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePageRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + case 2: + message.offset = longToNumber(reader.uint64() as Long); + break; + case 3: + message.limit = longToNumber(reader.uint64() as Long); + break; + case 4: + message.countTotal = reader.bool(); + break; + case 5: + message.reverse = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PageRequest { + return { + key: isSet(object.key) ? bytesFromBase64(object.key) : new Uint8Array(), + offset: isSet(object.offset) ? Number(object.offset) : 0, + limit: isSet(object.limit) ? Number(object.limit) : 0, + countTotal: isSet(object.countTotal) ? Boolean(object.countTotal) : false, + reverse: isSet(object.reverse) ? Boolean(object.reverse) : false, + }; + }, + + toJSON(message: PageRequest): unknown { + const obj: any = {}; + message.key !== undefined + && (obj.key = base64FromBytes(message.key !== undefined ? message.key : new Uint8Array())); + message.offset !== undefined && (obj.offset = Math.round(message.offset)); + message.limit !== undefined && (obj.limit = Math.round(message.limit)); + message.countTotal !== undefined && (obj.countTotal = message.countTotal); + message.reverse !== undefined && (obj.reverse = message.reverse); + return obj; + }, + + fromPartial, I>>(object: I): PageRequest { + const message = createBasePageRequest(); + message.key = object.key ?? new Uint8Array(); + message.offset = object.offset ?? 0; + message.limit = object.limit ?? 0; + message.countTotal = object.countTotal ?? false; + message.reverse = object.reverse ?? 
false; + return message; + }, +}; + +function createBasePageResponse(): PageResponse { + return { nextKey: new Uint8Array(), total: 0 }; +} + +export const PageResponse = { + encode(message: PageResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.nextKey.length !== 0) { + writer.uint32(10).bytes(message.nextKey); + } + if (message.total !== 0) { + writer.uint32(16).uint64(message.total); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PageResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePageResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.nextKey = reader.bytes(); + break; + case 2: + message.total = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PageResponse { + return { + nextKey: isSet(object.nextKey) ? bytesFromBase64(object.nextKey) : new Uint8Array(), + total: isSet(object.total) ? Number(object.total) : 0, + }; + }, + + toJSON(message: PageResponse): unknown { + const obj: any = {}; + message.nextKey !== undefined + && (obj.nextKey = base64FromBytes(message.nextKey !== undefined ? message.nextKey : new Uint8Array())); + message.total !== undefined && (obj.total = Math.round(message.total)); + return obj; + }, + + fromPartial, I>>(object: I): PageResponse { + const message = createBasePageResponse(); + message.nextKey = object.nextKey ?? new Uint8Array(); + message.total = object.total ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.tx.v1beta1/types/cosmos/base/v1beta1/coin.ts b/ts-client/cosmos.tx.v1beta1/types/cosmos/base/v1beta1/coin.ts new file mode 100644 index 000000000..b28eec03f --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/cosmos/base/v1beta1/coin.ts @@ -0,0 +1,261 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos.base.v1beta1"; + +/** + * Coin defines a token with a denomination and an amount. + * + * NOTE: The amount field is an Int which implements the custom method + * signatures required by gogoproto. + */ +export interface Coin { + denom: string; + amount: string; +} + +/** + * DecCoin defines a token with a denomination and a decimal amount. + * + * NOTE: The amount field is an Dec which implements the custom method + * signatures required by gogoproto. + */ +export interface DecCoin { + denom: string; + amount: string; +} + +/** IntProto defines a Protobuf wrapper around an Int object. */ +export interface IntProto { + int: string; +} + +/** DecProto defines a Protobuf wrapper around a Dec object. 
*/ +export interface DecProto { + dec: string; +} + +function createBaseCoin(): Coin { + return { denom: "", amount: "" }; +} + +export const Coin = { + encode(message: Coin, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.denom !== "") { + writer.uint32(10).string(message.denom); + } + if (message.amount !== "") { + writer.uint32(18).string(message.amount); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Coin { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCoin(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.denom = reader.string(); + break; + case 2: + message.amount = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Coin { + return { + denom: isSet(object.denom) ? String(object.denom) : "", + amount: isSet(object.amount) ? String(object.amount) : "", + }; + }, + + toJSON(message: Coin): unknown { + const obj: any = {}; + message.denom !== undefined && (obj.denom = message.denom); + message.amount !== undefined && (obj.amount = message.amount); + return obj; + }, + + fromPartial, I>>(object: I): Coin { + const message = createBaseCoin(); + message.denom = object.denom ?? ""; + message.amount = object.amount ?? ""; + return message; + }, +}; + +function createBaseDecCoin(): DecCoin { + return { denom: "", amount: "" }; +} + +export const DecCoin = { + encode(message: DecCoin, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.denom !== "") { + writer.uint32(10).string(message.denom); + } + if (message.amount !== "") { + writer.uint32(18).string(message.amount); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DecCoin { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDecCoin(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.denom = reader.string(); + break; + case 2: + message.amount = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DecCoin { + return { + denom: isSet(object.denom) ? String(object.denom) : "", + amount: isSet(object.amount) ? String(object.amount) : "", + }; + }, + + toJSON(message: DecCoin): unknown { + const obj: any = {}; + message.denom !== undefined && (obj.denom = message.denom); + message.amount !== undefined && (obj.amount = message.amount); + return obj; + }, + + fromPartial, I>>(object: I): DecCoin { + const message = createBaseDecCoin(); + message.denom = object.denom ?? ""; + message.amount = object.amount ?? ""; + return message; + }, +}; + +function createBaseIntProto(): IntProto { + return { int: "" }; +} + +export const IntProto = { + encode(message: IntProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.int !== "") { + writer.uint32(10).string(message.int); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): IntProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseIntProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.int = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): IntProto { + return { int: isSet(object.int) ? 
String(object.int) : "" }; + }, + + toJSON(message: IntProto): unknown { + const obj: any = {}; + message.int !== undefined && (obj.int = message.int); + return obj; + }, + + fromPartial, I>>(object: I): IntProto { + const message = createBaseIntProto(); + message.int = object.int ?? ""; + return message; + }, +}; + +function createBaseDecProto(): DecProto { + return { dec: "" }; +} + +export const DecProto = { + encode(message: DecProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.dec !== "") { + writer.uint32(10).string(message.dec); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DecProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDecProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.dec = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DecProto { + return { dec: isSet(object.dec) ? String(object.dec) : "" }; + }, + + toJSON(message: DecProto): unknown { + const obj: any = {}; + message.dec !== undefined && (obj.dec = message.dec); + return obj; + }, + + fromPartial, I>>(object: I): DecProto { + const message = createBaseDecProto(); + message.dec = object.dec ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.tx.v1beta1/types/cosmos/crypto/multisig/v1beta1/multisig.ts b/ts-client/cosmos.tx.v1beta1/types/cosmos/crypto/multisig/v1beta1/multisig.ts new file mode 100644 index 000000000..44740b79e --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/cosmos/crypto/multisig/v1beta1/multisig.ts @@ -0,0 +1,195 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos.crypto.multisig.v1beta1"; + +/** + * MultiSignature wraps the signatures from a multisig.LegacyAminoPubKey. + * See cosmos.tx.v1betata1.ModeInfo.Multi for how to specify which signers + * signed and with which modes. + */ +export interface MultiSignature { + signatures: Uint8Array[]; +} + +/** + * CompactBitArray is an implementation of a space efficient bit array. + * This is used to ensure that the encoded data takes up a minimal amount of + * space after proto encoding. + * This is not thread safe, and is not intended for concurrent usage. + */ +export interface CompactBitArray { + extraBitsStored: number; + elems: Uint8Array; +} + +function createBaseMultiSignature(): MultiSignature { + return { signatures: [] }; +} + +export const MultiSignature = { + encode(message: MultiSignature, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.signatures) { + writer.uint32(10).bytes(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MultiSignature { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMultiSignature(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signatures.push(reader.bytes()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MultiSignature { + return { + signatures: Array.isArray(object?.signatures) ? object.signatures.map((e: any) => bytesFromBase64(e)) : [], + }; + }, + + toJSON(message: MultiSignature): unknown { + const obj: any = {}; + if (message.signatures) { + obj.signatures = message.signatures.map((e) => base64FromBytes(e !== undefined ? e : new Uint8Array())); + } else { + obj.signatures = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MultiSignature { + const message = createBaseMultiSignature(); + message.signatures = object.signatures?.map((e) => e) || []; + return message; + }, +}; + +function createBaseCompactBitArray(): CompactBitArray { + return { extraBitsStored: 0, elems: new Uint8Array() }; +} + +export const CompactBitArray = { + encode(message: CompactBitArray, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.extraBitsStored !== 0) { + writer.uint32(8).uint32(message.extraBitsStored); + } + if (message.elems.length !== 0) { + writer.uint32(18).bytes(message.elems); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CompactBitArray { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseCompactBitArray(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.extraBitsStored = reader.uint32(); + break; + case 2: + message.elems = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): CompactBitArray { + return { + extraBitsStored: isSet(object.extraBitsStored) ? Number(object.extraBitsStored) : 0, + elems: isSet(object.elems) ? bytesFromBase64(object.elems) : new Uint8Array(), + }; + }, + + toJSON(message: CompactBitArray): unknown { + const obj: any = {}; + message.extraBitsStored !== undefined && (obj.extraBitsStored = Math.round(message.extraBitsStored)); + message.elems !== undefined + && (obj.elems = base64FromBytes(message.elems !== undefined ? message.elems : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): CompactBitArray { + const message = createBaseCompactBitArray(); + message.extraBitsStored = object.extraBitsStored ?? 0; + message.elems = object.elems ?? 
new Uint8Array(); + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.tx.v1beta1/types/cosmos/tx/signing/v1beta1/signing.ts b/ts-client/cosmos.tx.v1beta1/types/cosmos/tx/signing/v1beta1/signing.ts new file mode 100644 index 000000000..fb7e64a93 --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/cosmos/tx/signing/v1beta1/signing.ts @@ -0,0 +1,556 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { Any } from "../../../../google/protobuf/any"; +import { CompactBitArray } from "../../../crypto/multisig/v1beta1/multisig"; + +export const protobufPackage = "cosmos.tx.signing.v1beta1"; + +/** + * SignMode represents a signing mode with its own security guarantees. + * + * This enum should be considered a registry of all known sign modes + * in the Cosmos ecosystem. Apps are not expected to support all known + * sign modes. Apps that would like to support custom sign modes are + * encouraged to open a small PR against this file to add a new case + * to this SignMode enum describing their sign mode so that different + * apps have a consistent version of this enum. + */ +export enum SignMode { + /** + * SIGN_MODE_UNSPECIFIED - SIGN_MODE_UNSPECIFIED specifies an unknown signing mode and will be + * rejected. + */ + SIGN_MODE_UNSPECIFIED = 0, + /** + * SIGN_MODE_DIRECT - SIGN_MODE_DIRECT specifies a signing mode which uses SignDoc and is + * verified with raw bytes from Tx. + */ + SIGN_MODE_DIRECT = 1, + /** + * SIGN_MODE_TEXTUAL - SIGN_MODE_TEXTUAL is a future signing mode that will verify some + * human-readable textual representation on top of the binary representation + * from SIGN_MODE_DIRECT. It is currently not supported. + */ + SIGN_MODE_TEXTUAL = 2, + /** + * SIGN_MODE_DIRECT_AUX - SIGN_MODE_DIRECT_AUX specifies a signing mode which uses + * SignDocDirectAux. 
As opposed to SIGN_MODE_DIRECT, this sign mode does not + * require signers signing over other signers' `signer_info`. It also allows + * for adding Tips in transactions. + * + * Since: cosmos-sdk 0.46 + */ + SIGN_MODE_DIRECT_AUX = 3, + /** + * SIGN_MODE_LEGACY_AMINO_JSON - SIGN_MODE_LEGACY_AMINO_JSON is a backwards compatibility mode which uses + * Amino JSON and will be removed in the future. + */ + SIGN_MODE_LEGACY_AMINO_JSON = 127, + /** + * SIGN_MODE_EIP_191 - SIGN_MODE_EIP_191 specifies the sign mode for EIP 191 signing on the Cosmos + * SDK. Ref: https://eips.ethereum.org/EIPS/eip-191 + * + * Currently, SIGN_MODE_EIP_191 is registered as a SignMode enum variant, + * but is not implemented on the SDK by default. To enable EIP-191, you need + * to pass a custom `TxConfig` that has an implementation of + * `SignModeHandler` for EIP-191. The SDK may decide to fully support + * EIP-191 in the future. + * + * Since: cosmos-sdk 0.45.2 + */ + SIGN_MODE_EIP_191 = 191, + UNRECOGNIZED = -1, +} + +export function signModeFromJSON(object: any): SignMode { + switch (object) { + case 0: + case "SIGN_MODE_UNSPECIFIED": + return SignMode.SIGN_MODE_UNSPECIFIED; + case 1: + case "SIGN_MODE_DIRECT": + return SignMode.SIGN_MODE_DIRECT; + case 2: + case "SIGN_MODE_TEXTUAL": + return SignMode.SIGN_MODE_TEXTUAL; + case 3: + case "SIGN_MODE_DIRECT_AUX": + return SignMode.SIGN_MODE_DIRECT_AUX; + case 127: + case "SIGN_MODE_LEGACY_AMINO_JSON": + return SignMode.SIGN_MODE_LEGACY_AMINO_JSON; + case 191: + case "SIGN_MODE_EIP_191": + return SignMode.SIGN_MODE_EIP_191; + case -1: + case "UNRECOGNIZED": + default: + return SignMode.UNRECOGNIZED; + } +} + +export function signModeToJSON(object: SignMode): string { + switch (object) { + case SignMode.SIGN_MODE_UNSPECIFIED: + return "SIGN_MODE_UNSPECIFIED"; + case SignMode.SIGN_MODE_DIRECT: + return "SIGN_MODE_DIRECT"; + case SignMode.SIGN_MODE_TEXTUAL: + return "SIGN_MODE_TEXTUAL"; + case SignMode.SIGN_MODE_DIRECT_AUX: + return 
"SIGN_MODE_DIRECT_AUX"; + case SignMode.SIGN_MODE_LEGACY_AMINO_JSON: + return "SIGN_MODE_LEGACY_AMINO_JSON"; + case SignMode.SIGN_MODE_EIP_191: + return "SIGN_MODE_EIP_191"; + case SignMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** SignatureDescriptors wraps multiple SignatureDescriptor's. */ +export interface SignatureDescriptors { + /** signatures are the signature descriptors */ + signatures: SignatureDescriptor[]; +} + +/** + * SignatureDescriptor is a convenience type which represents the full data for + * a signature including the public key of the signer, signing modes and the + * signature itself. It is primarily used for coordinating signatures between + * clients. + */ +export interface SignatureDescriptor { + /** public_key is the public key of the signer */ + publicKey: Any | undefined; + data: + | SignatureDescriptor_Data + | undefined; + /** + * sequence is the sequence of the account, which describes the + * number of committed transactions signed by a given address. It is used to prevent + * replay attacks. 
+ */ + sequence: number; +} + +/** Data represents signature data */ +export interface SignatureDescriptor_Data { + /** single represents a single signer */ + single: + | SignatureDescriptor_Data_Single + | undefined; + /** multi represents a multisig signer */ + multi: SignatureDescriptor_Data_Multi | undefined; +} + +/** Single is the signature data for a single signer */ +export interface SignatureDescriptor_Data_Single { + /** mode is the signing mode of the single signer */ + mode: SignMode; + /** signature is the raw signature bytes */ + signature: Uint8Array; +} + +/** Multi is the signature data for a multisig public key */ +export interface SignatureDescriptor_Data_Multi { + /** bitarray specifies which keys within the multisig are signing */ + bitarray: + | CompactBitArray + | undefined; + /** signatures is the signatures of the multi-signature */ + signatures: SignatureDescriptor_Data[]; +} + +function createBaseSignatureDescriptors(): SignatureDescriptors { + return { signatures: [] }; +} + +export const SignatureDescriptors = { + encode(message: SignatureDescriptors, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.signatures) { + SignatureDescriptor.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SignatureDescriptors { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSignatureDescriptors(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signatures.push(SignatureDescriptor.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SignatureDescriptors { + return { + signatures: Array.isArray(object?.signatures) + ? 
object.signatures.map((e: any) => SignatureDescriptor.fromJSON(e)) + : [], + }; + }, + + toJSON(message: SignatureDescriptors): unknown { + const obj: any = {}; + if (message.signatures) { + obj.signatures = message.signatures.map((e) => e ? SignatureDescriptor.toJSON(e) : undefined); + } else { + obj.signatures = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SignatureDescriptors { + const message = createBaseSignatureDescriptors(); + message.signatures = object.signatures?.map((e) => SignatureDescriptor.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSignatureDescriptor(): SignatureDescriptor { + return { publicKey: undefined, data: undefined, sequence: 0 }; +} + +export const SignatureDescriptor = { + encode(message: SignatureDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.publicKey !== undefined) { + Any.encode(message.publicKey, writer.uint32(10).fork()).ldelim(); + } + if (message.data !== undefined) { + SignatureDescriptor_Data.encode(message.data, writer.uint32(18).fork()).ldelim(); + } + if (message.sequence !== 0) { + writer.uint32(24).uint64(message.sequence); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SignatureDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSignatureDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.publicKey = Any.decode(reader, reader.uint32()); + break; + case 2: + message.data = SignatureDescriptor_Data.decode(reader, reader.uint32()); + break; + case 3: + message.sequence = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SignatureDescriptor { + return { + publicKey: isSet(object.publicKey) ? 
Any.fromJSON(object.publicKey) : undefined, + data: isSet(object.data) ? SignatureDescriptor_Data.fromJSON(object.data) : undefined, + sequence: isSet(object.sequence) ? Number(object.sequence) : 0, + }; + }, + + toJSON(message: SignatureDescriptor): unknown { + const obj: any = {}; + message.publicKey !== undefined && (obj.publicKey = message.publicKey ? Any.toJSON(message.publicKey) : undefined); + message.data !== undefined && (obj.data = message.data ? SignatureDescriptor_Data.toJSON(message.data) : undefined); + message.sequence !== undefined && (obj.sequence = Math.round(message.sequence)); + return obj; + }, + + fromPartial, I>>(object: I): SignatureDescriptor { + const message = createBaseSignatureDescriptor(); + message.publicKey = (object.publicKey !== undefined && object.publicKey !== null) + ? Any.fromPartial(object.publicKey) + : undefined; + message.data = (object.data !== undefined && object.data !== null) + ? SignatureDescriptor_Data.fromPartial(object.data) + : undefined; + message.sequence = object.sequence ?? 0; + return message; + }, +}; + +function createBaseSignatureDescriptor_Data(): SignatureDescriptor_Data { + return { single: undefined, multi: undefined }; +} + +export const SignatureDescriptor_Data = { + encode(message: SignatureDescriptor_Data, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.single !== undefined) { + SignatureDescriptor_Data_Single.encode(message.single, writer.uint32(10).fork()).ldelim(); + } + if (message.multi !== undefined) { + SignatureDescriptor_Data_Multi.encode(message.multi, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SignatureDescriptor_Data { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSignatureDescriptor_Data(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.single = SignatureDescriptor_Data_Single.decode(reader, reader.uint32()); + break; + case 2: + message.multi = SignatureDescriptor_Data_Multi.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SignatureDescriptor_Data { + return { + single: isSet(object.single) ? SignatureDescriptor_Data_Single.fromJSON(object.single) : undefined, + multi: isSet(object.multi) ? SignatureDescriptor_Data_Multi.fromJSON(object.multi) : undefined, + }; + }, + + toJSON(message: SignatureDescriptor_Data): unknown { + const obj: any = {}; + message.single !== undefined + && (obj.single = message.single ? SignatureDescriptor_Data_Single.toJSON(message.single) : undefined); + message.multi !== undefined + && (obj.multi = message.multi ? SignatureDescriptor_Data_Multi.toJSON(message.multi) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): SignatureDescriptor_Data { + const message = createBaseSignatureDescriptor_Data(); + message.single = (object.single !== undefined && object.single !== null) + ? SignatureDescriptor_Data_Single.fromPartial(object.single) + : undefined; + message.multi = (object.multi !== undefined && object.multi !== null) + ? 
SignatureDescriptor_Data_Multi.fromPartial(object.multi) + : undefined; + return message; + }, +}; + +function createBaseSignatureDescriptor_Data_Single(): SignatureDescriptor_Data_Single { + return { mode: 0, signature: new Uint8Array() }; +} + +export const SignatureDescriptor_Data_Single = { + encode(message: SignatureDescriptor_Data_Single, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.mode !== 0) { + writer.uint32(8).int32(message.mode); + } + if (message.signature.length !== 0) { + writer.uint32(18).bytes(message.signature); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SignatureDescriptor_Data_Single { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSignatureDescriptor_Data_Single(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.mode = reader.int32() as any; + break; + case 2: + message.signature = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SignatureDescriptor_Data_Single { + return { + mode: isSet(object.mode) ? signModeFromJSON(object.mode) : 0, + signature: isSet(object.signature) ? bytesFromBase64(object.signature) : new Uint8Array(), + }; + }, + + toJSON(message: SignatureDescriptor_Data_Single): unknown { + const obj: any = {}; + message.mode !== undefined && (obj.mode = signModeToJSON(message.mode)); + message.signature !== undefined + && (obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : new Uint8Array())); + return obj; + }, + + fromPartial, I>>( + object: I, + ): SignatureDescriptor_Data_Single { + const message = createBaseSignatureDescriptor_Data_Single(); + message.mode = object.mode ?? 0; + message.signature = object.signature ?? 
new Uint8Array(); + return message; + }, +}; + +function createBaseSignatureDescriptor_Data_Multi(): SignatureDescriptor_Data_Multi { + return { bitarray: undefined, signatures: [] }; +} + +export const SignatureDescriptor_Data_Multi = { + encode(message: SignatureDescriptor_Data_Multi, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.bitarray !== undefined) { + CompactBitArray.encode(message.bitarray, writer.uint32(10).fork()).ldelim(); + } + for (const v of message.signatures) { + SignatureDescriptor_Data.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SignatureDescriptor_Data_Multi { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSignatureDescriptor_Data_Multi(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.bitarray = CompactBitArray.decode(reader, reader.uint32()); + break; + case 2: + message.signatures.push(SignatureDescriptor_Data.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SignatureDescriptor_Data_Multi { + return { + bitarray: isSet(object.bitarray) ? CompactBitArray.fromJSON(object.bitarray) : undefined, + signatures: Array.isArray(object?.signatures) + ? object.signatures.map((e: any) => SignatureDescriptor_Data.fromJSON(e)) + : [], + }; + }, + + toJSON(message: SignatureDescriptor_Data_Multi): unknown { + const obj: any = {}; + message.bitarray !== undefined + && (obj.bitarray = message.bitarray ? CompactBitArray.toJSON(message.bitarray) : undefined); + if (message.signatures) { + obj.signatures = message.signatures.map((e) => e ? 
SignatureDescriptor_Data.toJSON(e) : undefined); + } else { + obj.signatures = []; + } + return obj; + }, + + fromPartial, I>>( + object: I, + ): SignatureDescriptor_Data_Multi { + const message = createBaseSignatureDescriptor_Data_Multi(); + message.bitarray = (object.bitarray !== undefined && object.bitarray !== null) + ? CompactBitArray.fromPartial(object.bitarray) + : undefined; + message.signatures = object.signatures?.map((e) => SignatureDescriptor_Data.fromPartial(e)) || []; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.tx.v1beta1/types/cosmos/tx/v1beta1/service.ts b/ts-client/cosmos.tx.v1beta1/types/cosmos/tx/v1beta1/service.ts new file mode 100644 index 000000000..72775bdea --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/cosmos/tx/v1beta1/service.ts @@ -0,0 +1,1579 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { Block } from "../../../tendermint/types/block"; +import { BlockID } from "../../../tendermint/types/types"; +import { GasInfo, Result, TxResponse } from "../../base/abci/v1beta1/abci"; +import { PageRequest, PageResponse } from "../../base/query/v1beta1/pagination"; +import { Tx } from "./tx"; + +export const protobufPackage = "cosmos.tx.v1beta1"; + +/** OrderBy defines the sorting order */ +export enum OrderBy { + /** ORDER_BY_UNSPECIFIED - ORDER_BY_UNSPECIFIED specifies an unknown sorting order. OrderBy defaults to ASC in this case. 
*/ + ORDER_BY_UNSPECIFIED = 0, + /** ORDER_BY_ASC - ORDER_BY_ASC defines ascending order */ + ORDER_BY_ASC = 1, + /** ORDER_BY_DESC - ORDER_BY_DESC defines descending order */ + ORDER_BY_DESC = 2, + UNRECOGNIZED = -1, +} + +export function orderByFromJSON(object: any): OrderBy { + switch (object) { + case 0: + case "ORDER_BY_UNSPECIFIED": + return OrderBy.ORDER_BY_UNSPECIFIED; + case 1: + case "ORDER_BY_ASC": + return OrderBy.ORDER_BY_ASC; + case 2: + case "ORDER_BY_DESC": + return OrderBy.ORDER_BY_DESC; + case -1: + case "UNRECOGNIZED": + default: + return OrderBy.UNRECOGNIZED; + } +} + +export function orderByToJSON(object: OrderBy): string { + switch (object) { + case OrderBy.ORDER_BY_UNSPECIFIED: + return "ORDER_BY_UNSPECIFIED"; + case OrderBy.ORDER_BY_ASC: + return "ORDER_BY_ASC"; + case OrderBy.ORDER_BY_DESC: + return "ORDER_BY_DESC"; + case OrderBy.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** BroadcastMode specifies the broadcast mode for the TxService.Broadcast RPC method. */ +export enum BroadcastMode { + /** BROADCAST_MODE_UNSPECIFIED - zero-value for mode ordering */ + BROADCAST_MODE_UNSPECIFIED = 0, + /** + * BROADCAST_MODE_BLOCK - DEPRECATED: use BROADCAST_MODE_SYNC instead, + * BROADCAST_MODE_BLOCK is not supported by the SDK from v0.47.x onwards. + * + * @deprecated + */ + BROADCAST_MODE_BLOCK = 1, + /** + * BROADCAST_MODE_SYNC - BROADCAST_MODE_SYNC defines a tx broadcasting mode where the client waits for + * a CheckTx execution response only. + */ + BROADCAST_MODE_SYNC = 2, + /** + * BROADCAST_MODE_ASYNC - BROADCAST_MODE_ASYNC defines a tx broadcasting mode where the client returns + * immediately. 
+ */ + BROADCAST_MODE_ASYNC = 3, + UNRECOGNIZED = -1, +} + +export function broadcastModeFromJSON(object: any): BroadcastMode { + switch (object) { + case 0: + case "BROADCAST_MODE_UNSPECIFIED": + return BroadcastMode.BROADCAST_MODE_UNSPECIFIED; + case 1: + case "BROADCAST_MODE_BLOCK": + return BroadcastMode.BROADCAST_MODE_BLOCK; + case 2: + case "BROADCAST_MODE_SYNC": + return BroadcastMode.BROADCAST_MODE_SYNC; + case 3: + case "BROADCAST_MODE_ASYNC": + return BroadcastMode.BROADCAST_MODE_ASYNC; + case -1: + case "UNRECOGNIZED": + default: + return BroadcastMode.UNRECOGNIZED; + } +} + +export function broadcastModeToJSON(object: BroadcastMode): string { + switch (object) { + case BroadcastMode.BROADCAST_MODE_UNSPECIFIED: + return "BROADCAST_MODE_UNSPECIFIED"; + case BroadcastMode.BROADCAST_MODE_BLOCK: + return "BROADCAST_MODE_BLOCK"; + case BroadcastMode.BROADCAST_MODE_SYNC: + return "BROADCAST_MODE_SYNC"; + case BroadcastMode.BROADCAST_MODE_ASYNC: + return "BROADCAST_MODE_ASYNC"; + case BroadcastMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * GetTxsEventRequest is the request type for the Service.TxsByEvents + * RPC method. + */ +export interface GetTxsEventRequest { + /** events is the list of transaction event type. */ + events: string[]; + /** + * pagination defines a pagination for the request. + * Deprecated post v0.46.x: use page and limit instead. + * + * @deprecated + */ + pagination: PageRequest | undefined; + orderBy: OrderBy; + /** page is the page number to query, starts at 1. If not provided, will default to first page. */ + page: number; + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. + */ + limit: number; +} + +/** + * GetTxsEventResponse is the response type for the Service.TxsByEvents + * RPC method. + */ +export interface GetTxsEventResponse { + /** txs is the list of queried transactions. 
*/ + txs: Tx[]; + /** tx_responses is the list of queried TxResponses. */ + txResponses: TxResponse[]; + /** + * pagination defines a pagination for the response. + * Deprecated post v0.46.x: use total instead. + * + * @deprecated + */ + pagination: + | PageResponse + | undefined; + /** total is total number of results available */ + total: number; +} + +/** + * BroadcastTxRequest is the request type for the Service.BroadcastTxRequest + * RPC method. + */ +export interface BroadcastTxRequest { + /** tx_bytes is the raw transaction. */ + txBytes: Uint8Array; + mode: BroadcastMode; +} + +/** + * BroadcastTxResponse is the response type for the + * Service.BroadcastTx method. + */ +export interface BroadcastTxResponse { + /** tx_response is the queried TxResponses. */ + txResponse: TxResponse | undefined; +} + +/** + * SimulateRequest is the request type for the Service.Simulate + * RPC method. + */ +export interface SimulateRequest { + /** + * tx is the transaction to simulate. + * Deprecated. Send raw tx bytes instead. + * + * @deprecated + */ + tx: + | Tx + | undefined; + /** + * tx_bytes is the raw transaction. + * + * Since: cosmos-sdk 0.43 + */ + txBytes: Uint8Array; +} + +/** + * SimulateResponse is the response type for the + * Service.SimulateRPC method. + */ +export interface SimulateResponse { + /** gas_info is the information about gas used in the simulation. */ + gasInfo: + | GasInfo + | undefined; + /** result is the result of the simulation. */ + result: Result | undefined; +} + +/** + * GetTxRequest is the request type for the Service.GetTx + * RPC method. + */ +export interface GetTxRequest { + /** hash is the tx hash to query, encoded as a hex string. */ + hash: string; +} + +/** GetTxResponse is the response type for the Service.GetTx method. */ +export interface GetTxResponse { + /** tx is the queried transaction. */ + tx: + | Tx + | undefined; + /** tx_response is the queried TxResponses. 
*/ + txResponse: TxResponse | undefined; +} + +/** + * GetBlockWithTxsRequest is the request type for the Service.GetBlockWithTxs + * RPC method. + * + * Since: cosmos-sdk 0.45.2 + */ +export interface GetBlockWithTxsRequest { + /** height is the height of the block to query. */ + height: number; + /** pagination defines a pagination for the request. */ + pagination: PageRequest | undefined; +} + +/** + * GetBlockWithTxsResponse is the response type for the Service.GetBlockWithTxs method. + * + * Since: cosmos-sdk 0.45.2 + */ +export interface GetBlockWithTxsResponse { + /** txs are the transactions in the block. */ + txs: Tx[]; + blockId: BlockID | undefined; + block: + | Block + | undefined; + /** pagination defines a pagination for the response. */ + pagination: PageResponse | undefined; +} + +/** + * TxDecodeRequest is the request type for the Service.TxDecode + * RPC method. + * + * Since: cosmos-sdk 0.47 + */ +export interface TxDecodeRequest { + /** tx_bytes is the raw transaction. */ + txBytes: Uint8Array; +} + +/** + * TxDecodeResponse is the response type for the + * Service.TxDecode method. + * + * Since: cosmos-sdk 0.47 + */ +export interface TxDecodeResponse { + /** tx is the decoded transaction. */ + tx: Tx | undefined; +} + +/** + * TxEncodeRequest is the request type for the Service.TxEncode + * RPC method. + * + * Since: cosmos-sdk 0.47 + */ +export interface TxEncodeRequest { + /** tx is the transaction to encode. */ + tx: Tx | undefined; +} + +/** + * TxEncodeResponse is the response type for the + * Service.TxEncode method. + * + * Since: cosmos-sdk 0.47 + */ +export interface TxEncodeResponse { + /** tx_bytes is the encoded transaction bytes. */ + txBytes: Uint8Array; +} + +/** + * TxEncodeAminoRequest is the request type for the Service.TxEncodeAmino + * RPC method. 
+ * + * Since: cosmos-sdk 0.47 + */ +export interface TxEncodeAminoRequest { + aminoJson: string; +} + +/** + * TxEncodeAminoResponse is the response type for the Service.TxEncodeAmino + * RPC method. + * + * Since: cosmos-sdk 0.47 + */ +export interface TxEncodeAminoResponse { + aminoBinary: Uint8Array; +} + +/** + * TxDecodeAminoRequest is the request type for the Service.TxDecodeAmino + * RPC method. + * + * Since: cosmos-sdk 0.47 + */ +export interface TxDecodeAminoRequest { + aminoBinary: Uint8Array; +} + +/** + * TxDecodeAminoResponse is the response type for the Service.TxDecodeAmino + * RPC method. + * + * Since: cosmos-sdk 0.47 + */ +export interface TxDecodeAminoResponse { + aminoJson: string; +} + +function createBaseGetTxsEventRequest(): GetTxsEventRequest { + return { events: [], pagination: undefined, orderBy: 0, page: 0, limit: 0 }; +} + +export const GetTxsEventRequest = { + encode(message: GetTxsEventRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.events) { + writer.uint32(10).string(v!); + } + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + if (message.orderBy !== 0) { + writer.uint32(24).int32(message.orderBy); + } + if (message.page !== 0) { + writer.uint32(32).uint64(message.page); + } + if (message.limit !== 0) { + writer.uint32(40).uint64(message.limit); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetTxsEventRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetTxsEventRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.events.push(reader.string()); + break; + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + case 3: + message.orderBy = reader.int32() as any; + break; + case 4: + message.page = longToNumber(reader.uint64() as Long); + break; + case 5: + message.limit = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GetTxsEventRequest { + return { + events: Array.isArray(object?.events) ? object.events.map((e: any) => String(e)) : [], + pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined, + orderBy: isSet(object.orderBy) ? orderByFromJSON(object.orderBy) : 0, + page: isSet(object.page) ? Number(object.page) : 0, + limit: isSet(object.limit) ? Number(object.limit) : 0, + }; + }, + + toJSON(message: GetTxsEventRequest): unknown { + const obj: any = {}; + if (message.events) { + obj.events = message.events.map((e) => e); + } else { + obj.events = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageRequest.toJSON(message.pagination) : undefined); + message.orderBy !== undefined && (obj.orderBy = orderByToJSON(message.orderBy)); + message.page !== undefined && (obj.page = Math.round(message.page)); + message.limit !== undefined && (obj.limit = Math.round(message.limit)); + return obj; + }, + + fromPartial, I>>(object: I): GetTxsEventRequest { + const message = createBaseGetTxsEventRequest(); + message.events = object.events?.map((e) => e) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageRequest.fromPartial(object.pagination) + : undefined; + message.orderBy = object.orderBy ?? 0; + message.page = object.page ?? 
0; + message.limit = object.limit ?? 0; + return message; + }, +}; + +function createBaseGetTxsEventResponse(): GetTxsEventResponse { + return { txs: [], txResponses: [], pagination: undefined, total: 0 }; +} + +export const GetTxsEventResponse = { + encode(message: GetTxsEventResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.txs) { + Tx.encode(v!, writer.uint32(10).fork()).ldelim(); + } + for (const v of message.txResponses) { + TxResponse.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(26).fork()).ldelim(); + } + if (message.total !== 0) { + writer.uint32(32).uint64(message.total); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetTxsEventResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetTxsEventResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.txs.push(Tx.decode(reader, reader.uint32())); + break; + case 2: + message.txResponses.push(TxResponse.decode(reader, reader.uint32())); + break; + case 3: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + case 4: + message.total = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GetTxsEventResponse { + return { + txs: Array.isArray(object?.txs) ? object.txs.map((e: any) => Tx.fromJSON(e)) : [], + txResponses: Array.isArray(object?.txResponses) ? object.txResponses.map((e: any) => TxResponse.fromJSON(e)) : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + total: isSet(object.total) ? 
Number(object.total) : 0, + }; + }, + + toJSON(message: GetTxsEventResponse): unknown { + const obj: any = {}; + if (message.txs) { + obj.txs = message.txs.map((e) => e ? Tx.toJSON(e) : undefined); + } else { + obj.txs = []; + } + if (message.txResponses) { + obj.txResponses = message.txResponses.map((e) => e ? TxResponse.toJSON(e) : undefined); + } else { + obj.txResponses = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + message.total !== undefined && (obj.total = Math.round(message.total)); + return obj; + }, + + fromPartial, I>>(object: I): GetTxsEventResponse { + const message = createBaseGetTxsEventResponse(); + message.txs = object.txs?.map((e) => Tx.fromPartial(e)) || []; + message.txResponses = object.txResponses?.map((e) => TxResponse.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + message.total = object.total ?? 0; + return message; + }, +}; + +function createBaseBroadcastTxRequest(): BroadcastTxRequest { + return { txBytes: new Uint8Array(), mode: 0 }; +} + +export const BroadcastTxRequest = { + encode(message: BroadcastTxRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.txBytes.length !== 0) { + writer.uint32(10).bytes(message.txBytes); + } + if (message.mode !== 0) { + writer.uint32(16).int32(message.mode); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BroadcastTxRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseBroadcastTxRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.txBytes = reader.bytes(); + break; + case 2: + message.mode = reader.int32() as any; + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): BroadcastTxRequest { + return { + txBytes: isSet(object.txBytes) ? bytesFromBase64(object.txBytes) : new Uint8Array(), + mode: isSet(object.mode) ? broadcastModeFromJSON(object.mode) : 0, + }; + }, + + toJSON(message: BroadcastTxRequest): unknown { + const obj: any = {}; + message.txBytes !== undefined + && (obj.txBytes = base64FromBytes(message.txBytes !== undefined ? message.txBytes : new Uint8Array())); + message.mode !== undefined && (obj.mode = broadcastModeToJSON(message.mode)); + return obj; + }, + + fromPartial, I>>(object: I): BroadcastTxRequest { + const message = createBaseBroadcastTxRequest(); + message.txBytes = object.txBytes ?? new Uint8Array(); + message.mode = object.mode ?? 0; + return message; + }, +}; + +function createBaseBroadcastTxResponse(): BroadcastTxResponse { + return { txResponse: undefined }; +} + +export const BroadcastTxResponse = { + encode(message: BroadcastTxResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.txResponse !== undefined) { + TxResponse.encode(message.txResponse, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BroadcastTxResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseBroadcastTxResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.txResponse = TxResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): BroadcastTxResponse { + return { txResponse: isSet(object.txResponse) ? TxResponse.fromJSON(object.txResponse) : undefined }; + }, + + toJSON(message: BroadcastTxResponse): unknown { + const obj: any = {}; + message.txResponse !== undefined + && (obj.txResponse = message.txResponse ? TxResponse.toJSON(message.txResponse) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): BroadcastTxResponse { + const message = createBaseBroadcastTxResponse(); + message.txResponse = (object.txResponse !== undefined && object.txResponse !== null) + ? TxResponse.fromPartial(object.txResponse) + : undefined; + return message; + }, +}; + +function createBaseSimulateRequest(): SimulateRequest { + return { tx: undefined, txBytes: new Uint8Array() }; +} + +export const SimulateRequest = { + encode(message: SimulateRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.tx !== undefined) { + Tx.encode(message.tx, writer.uint32(10).fork()).ldelim(); + } + if (message.txBytes.length !== 0) { + writer.uint32(18).bytes(message.txBytes); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SimulateRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSimulateRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.tx = Tx.decode(reader, reader.uint32()); + break; + case 2: + message.txBytes = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SimulateRequest { + return { + tx: isSet(object.tx) ? Tx.fromJSON(object.tx) : undefined, + txBytes: isSet(object.txBytes) ? bytesFromBase64(object.txBytes) : new Uint8Array(), + }; + }, + + toJSON(message: SimulateRequest): unknown { + const obj: any = {}; + message.tx !== undefined && (obj.tx = message.tx ? Tx.toJSON(message.tx) : undefined); + message.txBytes !== undefined + && (obj.txBytes = base64FromBytes(message.txBytes !== undefined ? message.txBytes : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): SimulateRequest { + const message = createBaseSimulateRequest(); + message.tx = (object.tx !== undefined && object.tx !== null) ? Tx.fromPartial(object.tx) : undefined; + message.txBytes = object.txBytes ?? new Uint8Array(); + return message; + }, +}; + +function createBaseSimulateResponse(): SimulateResponse { + return { gasInfo: undefined, result: undefined }; +} + +export const SimulateResponse = { + encode(message: SimulateResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.gasInfo !== undefined) { + GasInfo.encode(message.gasInfo, writer.uint32(10).fork()).ldelim(); + } + if (message.result !== undefined) { + Result.encode(message.result, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SimulateResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSimulateResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.gasInfo = GasInfo.decode(reader, reader.uint32()); + break; + case 2: + message.result = Result.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SimulateResponse { + return { + gasInfo: isSet(object.gasInfo) ? GasInfo.fromJSON(object.gasInfo) : undefined, + result: isSet(object.result) ? Result.fromJSON(object.result) : undefined, + }; + }, + + toJSON(message: SimulateResponse): unknown { + const obj: any = {}; + message.gasInfo !== undefined && (obj.gasInfo = message.gasInfo ? GasInfo.toJSON(message.gasInfo) : undefined); + message.result !== undefined && (obj.result = message.result ? Result.toJSON(message.result) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): SimulateResponse { + const message = createBaseSimulateResponse(); + message.gasInfo = (object.gasInfo !== undefined && object.gasInfo !== null) + ? GasInfo.fromPartial(object.gasInfo) + : undefined; + message.result = (object.result !== undefined && object.result !== null) + ? Result.fromPartial(object.result) + : undefined; + return message; + }, +}; + +function createBaseGetTxRequest(): GetTxRequest { + return { hash: "" }; +} + +export const GetTxRequest = { + encode(message: GetTxRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.hash !== "") { + writer.uint32(10).string(message.hash); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetTxRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetTxRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.hash = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GetTxRequest { + return { hash: isSet(object.hash) ? String(object.hash) : "" }; + }, + + toJSON(message: GetTxRequest): unknown { + const obj: any = {}; + message.hash !== undefined && (obj.hash = message.hash); + return obj; + }, + + fromPartial, I>>(object: I): GetTxRequest { + const message = createBaseGetTxRequest(); + message.hash = object.hash ?? ""; + return message; + }, +}; + +function createBaseGetTxResponse(): GetTxResponse { + return { tx: undefined, txResponse: undefined }; +} + +export const GetTxResponse = { + encode(message: GetTxResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.tx !== undefined) { + Tx.encode(message.tx, writer.uint32(10).fork()).ldelim(); + } + if (message.txResponse !== undefined) { + TxResponse.encode(message.txResponse, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetTxResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetTxResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.tx = Tx.decode(reader, reader.uint32()); + break; + case 2: + message.txResponse = TxResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GetTxResponse { + return { + tx: isSet(object.tx) ? Tx.fromJSON(object.tx) : undefined, + txResponse: isSet(object.txResponse) ? 
TxResponse.fromJSON(object.txResponse) : undefined, + }; + }, + + toJSON(message: GetTxResponse): unknown { + const obj: any = {}; + message.tx !== undefined && (obj.tx = message.tx ? Tx.toJSON(message.tx) : undefined); + message.txResponse !== undefined + && (obj.txResponse = message.txResponse ? TxResponse.toJSON(message.txResponse) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): GetTxResponse { + const message = createBaseGetTxResponse(); + message.tx = (object.tx !== undefined && object.tx !== null) ? Tx.fromPartial(object.tx) : undefined; + message.txResponse = (object.txResponse !== undefined && object.txResponse !== null) + ? TxResponse.fromPartial(object.txResponse) + : undefined; + return message; + }, +}; + +function createBaseGetBlockWithTxsRequest(): GetBlockWithTxsRequest { + return { height: 0, pagination: undefined }; +} + +export const GetBlockWithTxsRequest = { + encode(message: GetBlockWithTxsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.height !== 0) { + writer.uint32(8).int64(message.height); + } + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetBlockWithTxsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetBlockWithTxsRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.height = longToNumber(reader.int64() as Long); + break; + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GetBlockWithTxsRequest { + return { + height: isSet(object.height) ? 
Number(object.height) : 0, + pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: GetBlockWithTxsRequest): unknown { + const obj: any = {}; + message.height !== undefined && (obj.height = Math.round(message.height)); + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): GetBlockWithTxsRequest { + const message = createBaseGetBlockWithTxsRequest(); + message.height = object.height ?? 0; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseGetBlockWithTxsResponse(): GetBlockWithTxsResponse { + return { txs: [], blockId: undefined, block: undefined, pagination: undefined }; +} + +export const GetBlockWithTxsResponse = { + encode(message: GetBlockWithTxsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.txs) { + Tx.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.blockId !== undefined) { + BlockID.encode(message.blockId, writer.uint32(18).fork()).ldelim(); + } + if (message.block !== undefined) { + Block.encode(message.block, writer.uint32(26).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(34).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetBlockWithTxsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGetBlockWithTxsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.txs.push(Tx.decode(reader, reader.uint32())); + break; + case 2: + message.blockId = BlockID.decode(reader, reader.uint32()); + break; + case 3: + message.block = Block.decode(reader, reader.uint32()); + break; + case 4: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GetBlockWithTxsResponse { + return { + txs: Array.isArray(object?.txs) ? object.txs.map((e: any) => Tx.fromJSON(e)) : [], + blockId: isSet(object.blockId) ? BlockID.fromJSON(object.blockId) : undefined, + block: isSet(object.block) ? Block.fromJSON(object.block) : undefined, + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: GetBlockWithTxsResponse): unknown { + const obj: any = {}; + if (message.txs) { + obj.txs = message.txs.map((e) => e ? Tx.toJSON(e) : undefined); + } else { + obj.txs = []; + } + message.blockId !== undefined && (obj.blockId = message.blockId ? BlockID.toJSON(message.blockId) : undefined); + message.block !== undefined && (obj.block = message.block ? Block.toJSON(message.block) : undefined); + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): GetBlockWithTxsResponse { + const message = createBaseGetBlockWithTxsResponse(); + message.txs = object.txs?.map((e) => Tx.fromPartial(e)) || []; + message.blockId = (object.blockId !== undefined && object.blockId !== null) + ? BlockID.fromPartial(object.blockId) + : undefined; + message.block = (object.block !== undefined && object.block !== null) ? 
Block.fromPartial(object.block) : undefined; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseTxDecodeRequest(): TxDecodeRequest { + return { txBytes: new Uint8Array() }; +} + +export const TxDecodeRequest = { + encode(message: TxDecodeRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.txBytes.length !== 0) { + writer.uint32(10).bytes(message.txBytes); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TxDecodeRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTxDecodeRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.txBytes = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): TxDecodeRequest { + return { txBytes: isSet(object.txBytes) ? bytesFromBase64(object.txBytes) : new Uint8Array() }; + }, + + toJSON(message: TxDecodeRequest): unknown { + const obj: any = {}; + message.txBytes !== undefined + && (obj.txBytes = base64FromBytes(message.txBytes !== undefined ? message.txBytes : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): TxDecodeRequest { + const message = createBaseTxDecodeRequest(); + message.txBytes = object.txBytes ?? 
new Uint8Array(); + return message; + }, +}; + +function createBaseTxDecodeResponse(): TxDecodeResponse { + return { tx: undefined }; +} + +export const TxDecodeResponse = { + encode(message: TxDecodeResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.tx !== undefined) { + Tx.encode(message.tx, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TxDecodeResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTxDecodeResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.tx = Tx.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): TxDecodeResponse { + return { tx: isSet(object.tx) ? Tx.fromJSON(object.tx) : undefined }; + }, + + toJSON(message: TxDecodeResponse): unknown { + const obj: any = {}; + message.tx !== undefined && (obj.tx = message.tx ? Tx.toJSON(message.tx) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): TxDecodeResponse { + const message = createBaseTxDecodeResponse(); + message.tx = (object.tx !== undefined && object.tx !== null) ? Tx.fromPartial(object.tx) : undefined; + return message; + }, +}; + +function createBaseTxEncodeRequest(): TxEncodeRequest { + return { tx: undefined }; +} + +export const TxEncodeRequest = { + encode(message: TxEncodeRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.tx !== undefined) { + Tx.encode(message.tx, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TxEncodeRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseTxEncodeRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.tx = Tx.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): TxEncodeRequest { + return { tx: isSet(object.tx) ? Tx.fromJSON(object.tx) : undefined }; + }, + + toJSON(message: TxEncodeRequest): unknown { + const obj: any = {}; + message.tx !== undefined && (obj.tx = message.tx ? Tx.toJSON(message.tx) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): TxEncodeRequest { + const message = createBaseTxEncodeRequest(); + message.tx = (object.tx !== undefined && object.tx !== null) ? Tx.fromPartial(object.tx) : undefined; + return message; + }, +}; + +function createBaseTxEncodeResponse(): TxEncodeResponse { + return { txBytes: new Uint8Array() }; +} + +export const TxEncodeResponse = { + encode(message: TxEncodeResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.txBytes.length !== 0) { + writer.uint32(10).bytes(message.txBytes); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TxEncodeResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTxEncodeResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.txBytes = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): TxEncodeResponse { + return { txBytes: isSet(object.txBytes) ? bytesFromBase64(object.txBytes) : new Uint8Array() }; + }, + + toJSON(message: TxEncodeResponse): unknown { + const obj: any = {}; + message.txBytes !== undefined + && (obj.txBytes = base64FromBytes(message.txBytes !== undefined ? 
message.txBytes : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): TxEncodeResponse { + const message = createBaseTxEncodeResponse(); + message.txBytes = object.txBytes ?? new Uint8Array(); + return message; + }, +}; + +function createBaseTxEncodeAminoRequest(): TxEncodeAminoRequest { + return { aminoJson: "" }; +} + +export const TxEncodeAminoRequest = { + encode(message: TxEncodeAminoRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.aminoJson !== "") { + writer.uint32(10).string(message.aminoJson); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TxEncodeAminoRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTxEncodeAminoRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.aminoJson = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): TxEncodeAminoRequest { + return { aminoJson: isSet(object.aminoJson) ? String(object.aminoJson) : "" }; + }, + + toJSON(message: TxEncodeAminoRequest): unknown { + const obj: any = {}; + message.aminoJson !== undefined && (obj.aminoJson = message.aminoJson); + return obj; + }, + + fromPartial, I>>(object: I): TxEncodeAminoRequest { + const message = createBaseTxEncodeAminoRequest(); + message.aminoJson = object.aminoJson ?? 
""; + return message; + }, +}; + +function createBaseTxEncodeAminoResponse(): TxEncodeAminoResponse { + return { aminoBinary: new Uint8Array() }; +} + +export const TxEncodeAminoResponse = { + encode(message: TxEncodeAminoResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.aminoBinary.length !== 0) { + writer.uint32(10).bytes(message.aminoBinary); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TxEncodeAminoResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTxEncodeAminoResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.aminoBinary = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): TxEncodeAminoResponse { + return { aminoBinary: isSet(object.aminoBinary) ? bytesFromBase64(object.aminoBinary) : new Uint8Array() }; + }, + + toJSON(message: TxEncodeAminoResponse): unknown { + const obj: any = {}; + message.aminoBinary !== undefined + && (obj.aminoBinary = base64FromBytes( + message.aminoBinary !== undefined ? message.aminoBinary : new Uint8Array(), + )); + return obj; + }, + + fromPartial, I>>(object: I): TxEncodeAminoResponse { + const message = createBaseTxEncodeAminoResponse(); + message.aminoBinary = object.aminoBinary ?? 
new Uint8Array(); + return message; + }, +}; + +function createBaseTxDecodeAminoRequest(): TxDecodeAminoRequest { + return { aminoBinary: new Uint8Array() }; +} + +export const TxDecodeAminoRequest = { + encode(message: TxDecodeAminoRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.aminoBinary.length !== 0) { + writer.uint32(10).bytes(message.aminoBinary); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TxDecodeAminoRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTxDecodeAminoRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.aminoBinary = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): TxDecodeAminoRequest { + return { aminoBinary: isSet(object.aminoBinary) ? bytesFromBase64(object.aminoBinary) : new Uint8Array() }; + }, + + toJSON(message: TxDecodeAminoRequest): unknown { + const obj: any = {}; + message.aminoBinary !== undefined + && (obj.aminoBinary = base64FromBytes( + message.aminoBinary !== undefined ? message.aminoBinary : new Uint8Array(), + )); + return obj; + }, + + fromPartial, I>>(object: I): TxDecodeAminoRequest { + const message = createBaseTxDecodeAminoRequest(); + message.aminoBinary = object.aminoBinary ?? 
new Uint8Array(); + return message; + }, +}; + +function createBaseTxDecodeAminoResponse(): TxDecodeAminoResponse { + return { aminoJson: "" }; +} + +export const TxDecodeAminoResponse = { + encode(message: TxDecodeAminoResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.aminoJson !== "") { + writer.uint32(10).string(message.aminoJson); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TxDecodeAminoResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTxDecodeAminoResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.aminoJson = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): TxDecodeAminoResponse { + return { aminoJson: isSet(object.aminoJson) ? String(object.aminoJson) : "" }; + }, + + toJSON(message: TxDecodeAminoResponse): unknown { + const obj: any = {}; + message.aminoJson !== undefined && (obj.aminoJson = message.aminoJson); + return obj; + }, + + fromPartial, I>>(object: I): TxDecodeAminoResponse { + const message = createBaseTxDecodeAminoResponse(); + message.aminoJson = object.aminoJson ?? ""; + return message; + }, +}; + +/** Service defines a gRPC service for interacting with transactions. */ +export interface Service { + /** Simulate simulates executing a transaction for estimating gas usage. */ + Simulate(request: SimulateRequest): Promise; + /** GetTx fetches a tx by hash. */ + GetTx(request: GetTxRequest): Promise; + /** BroadcastTx broadcast transaction. */ + BroadcastTx(request: BroadcastTxRequest): Promise; + /** GetTxsEvent fetches txs by event. */ + GetTxsEvent(request: GetTxsEventRequest): Promise; + /** + * GetBlockWithTxs fetches a block with decoded txs. 
+ * + * Since: cosmos-sdk 0.45.2 + */ + GetBlockWithTxs(request: GetBlockWithTxsRequest): Promise; + /** + * TxDecode decodes the transaction. + * + * Since: cosmos-sdk 0.47 + */ + TxDecode(request: TxDecodeRequest): Promise; + /** + * TxEncode encodes the transaction. + * + * Since: cosmos-sdk 0.47 + */ + TxEncode(request: TxEncodeRequest): Promise; + /** + * TxEncodeAmino encodes an Amino transaction from JSON to encoded bytes. + * + * Since: cosmos-sdk 0.47 + */ + TxEncodeAmino(request: TxEncodeAminoRequest): Promise; + /** + * TxDecodeAmino decodes an Amino transaction from encoded bytes to JSON. + * + * Since: cosmos-sdk 0.47 + */ + TxDecodeAmino(request: TxDecodeAminoRequest): Promise; +} + +export class ServiceClientImpl implements Service { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.Simulate = this.Simulate.bind(this); + this.GetTx = this.GetTx.bind(this); + this.BroadcastTx = this.BroadcastTx.bind(this); + this.GetTxsEvent = this.GetTxsEvent.bind(this); + this.GetBlockWithTxs = this.GetBlockWithTxs.bind(this); + this.TxDecode = this.TxDecode.bind(this); + this.TxEncode = this.TxEncode.bind(this); + this.TxEncodeAmino = this.TxEncodeAmino.bind(this); + this.TxDecodeAmino = this.TxDecodeAmino.bind(this); + } + Simulate(request: SimulateRequest): Promise { + const data = SimulateRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.tx.v1beta1.Service", "Simulate", data); + return promise.then((data) => SimulateResponse.decode(new _m0.Reader(data))); + } + + GetTx(request: GetTxRequest): Promise { + const data = GetTxRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.tx.v1beta1.Service", "GetTx", data); + return promise.then((data) => GetTxResponse.decode(new _m0.Reader(data))); + } + + BroadcastTx(request: BroadcastTxRequest): Promise { + const data = BroadcastTxRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.tx.v1beta1.Service", "BroadcastTx", 
data); + return promise.then((data) => BroadcastTxResponse.decode(new _m0.Reader(data))); + } + + GetTxsEvent(request: GetTxsEventRequest): Promise { + const data = GetTxsEventRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.tx.v1beta1.Service", "GetTxsEvent", data); + return promise.then((data) => GetTxsEventResponse.decode(new _m0.Reader(data))); + } + + GetBlockWithTxs(request: GetBlockWithTxsRequest): Promise { + const data = GetBlockWithTxsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.tx.v1beta1.Service", "GetBlockWithTxs", data); + return promise.then((data) => GetBlockWithTxsResponse.decode(new _m0.Reader(data))); + } + + TxDecode(request: TxDecodeRequest): Promise { + const data = TxDecodeRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.tx.v1beta1.Service", "TxDecode", data); + return promise.then((data) => TxDecodeResponse.decode(new _m0.Reader(data))); + } + + TxEncode(request: TxEncodeRequest): Promise { + const data = TxEncodeRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.tx.v1beta1.Service", "TxEncode", data); + return promise.then((data) => TxEncodeResponse.decode(new _m0.Reader(data))); + } + + TxEncodeAmino(request: TxEncodeAminoRequest): Promise { + const data = TxEncodeAminoRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.tx.v1beta1.Service", "TxEncodeAmino", data); + return promise.then((data) => TxEncodeAminoResponse.decode(new _m0.Reader(data))); + } + + TxDecodeAmino(request: TxDecodeAminoRequest): Promise { + const data = TxDecodeAminoRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.tx.v1beta1.Service", "TxDecodeAmino", data); + return promise.then((data) => TxDecodeAminoResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +declare var self: any | undefined; +declare var window: any | 
undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.tx.v1beta1/types/cosmos/tx/v1beta1/tx.ts b/ts-client/cosmos.tx.v1beta1/types/cosmos/tx/v1beta1/tx.ts new file mode 100644 index 000000000..f896b011e --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/cosmos/tx/v1beta1/tx.ts @@ -0,0 +1,1355 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { Any } from "../../../google/protobuf/any"; +import { Coin } from "../../base/v1beta1/coin"; +import { CompactBitArray } from "../../crypto/multisig/v1beta1/multisig"; +import { SignMode, signModeFromJSON, signModeToJSON } from "../signing/v1beta1/signing"; + +export const protobufPackage = "cosmos.tx.v1beta1"; + +/** Tx is the standard type used for broadcasting transactions. */ +export interface Tx { + /** body is the processable content of the transaction */ + body: + | TxBody + | undefined; + /** + * auth_info is the authorization related content of the transaction, + * specifically signers, signer modes and fee + */ + authInfo: + | AuthInfo + | undefined; + /** + * signatures is a list of signatures that matches the length and order of + * AuthInfo's signer_infos to allow connecting signature meta information like + * public key and signing mode by position. + */ + signatures: Uint8Array[]; +} + +/** + * TxRaw is a variant of Tx that pins the signer's exact binary representation + * of body and auth_info. This is used for signing, broadcasting and + * verification. 
The binary `serialize(tx: TxRaw)` is stored in Tendermint and + * the hash `sha256(serialize(tx: TxRaw))` becomes the "txhash", commonly used + * as the transaction ID. + */ +export interface TxRaw { + /** + * body_bytes is a protobuf serialization of a TxBody that matches the + * representation in SignDoc. + */ + bodyBytes: Uint8Array; + /** + * auth_info_bytes is a protobuf serialization of an AuthInfo that matches the + * representation in SignDoc. + */ + authInfoBytes: Uint8Array; + /** + * signatures is a list of signatures that matches the length and order of + * AuthInfo's signer_infos to allow connecting signature meta information like + * public key and signing mode by position. + */ + signatures: Uint8Array[]; +} + +/** SignDoc is the type used for generating sign bytes for SIGN_MODE_DIRECT. */ +export interface SignDoc { + /** + * body_bytes is protobuf serialization of a TxBody that matches the + * representation in TxRaw. + */ + bodyBytes: Uint8Array; + /** + * auth_info_bytes is a protobuf serialization of an AuthInfo that matches the + * representation in TxRaw. + */ + authInfoBytes: Uint8Array; + /** + * chain_id is the unique identifier of the chain this transaction targets. + * It prevents signed transactions from being used on another chain by an + * attacker + */ + chainId: string; + /** account_number is the account number of the account in state */ + accountNumber: number; +} + +/** + * SignDocDirectAux is the type used for generating sign bytes for + * SIGN_MODE_DIRECT_AUX. + * + * Since: cosmos-sdk 0.46 + */ +export interface SignDocDirectAux { + /** + * body_bytes is protobuf serialization of a TxBody that matches the + * representation in TxRaw. + */ + bodyBytes: Uint8Array; + /** public_key is the public key of the signing account. */ + publicKey: + | Any + | undefined; + /** + * chain_id is the identifier of the chain this transaction targets. + * It prevents signed transactions from being used on another chain by an + * attacker. 
+ */ + chainId: string; + /** account_number is the account number of the account in state. */ + accountNumber: number; + /** sequence is the sequence number of the signing account. */ + sequence: number; + /** + * Tip is the optional tip used for transactions fees paid in another denom. + * It should be left empty if the signer is not the tipper for this + * transaction. + * + * This field is ignored if the chain didn't enable tips, i.e. didn't add the + * `TipDecorator` in its posthandler. + */ + tip: Tip | undefined; +} + +/** TxBody is the body of a transaction that all signers sign over. */ +export interface TxBody { + /** + * messages is a list of messages to be executed. The required signers of + * those messages define the number and order of elements in AuthInfo's + * signer_infos and Tx's signatures. Each required signer address is added to + * the list only the first time it occurs. + * By convention, the first required signer (usually from the first message) + * is referred to as the primary signer and pays the fee for the whole + * transaction. + */ + messages: Any[]; + /** + * memo is any arbitrary note/comment to be added to the transaction. + * WARNING: in clients, any publicly exposed text should not be called memo, + * but should be called `note` instead (see https://github.com/cosmos/cosmos-sdk/issues/9122). + */ + memo: string; + /** + * timeout is the block height after which this transaction will not + * be processed by the chain + */ + timeoutHeight: number; + /** + * extension_options are arbitrary options that can be added by chains + * when the default options are not sufficient. If any of these are present + * and can't be handled, the transaction will be rejected + */ + extensionOptions: Any[]; + /** + * extension_options are arbitrary options that can be added by chains + * when the default options are not sufficient. 
If any of these are present + * and can't be handled, they will be ignored + */ + nonCriticalExtensionOptions: Any[]; +} + +/** + * AuthInfo describes the fee and signer modes that are used to sign a + * transaction. + */ +export interface AuthInfo { + /** + * signer_infos defines the signing modes for the required signers. The number + * and order of elements must match the required signers from TxBody's + * messages. The first element is the primary signer and the one which pays + * the fee. + */ + signerInfos: SignerInfo[]; + /** + * Fee is the fee and gas limit for the transaction. The first signer is the + * primary signer and the one which pays the fee. The fee can be calculated + * based on the cost of evaluating the body and doing signature verification + * of the signers. This can be estimated via simulation. + */ + fee: + | Fee + | undefined; + /** + * Tip is the optional tip used for transactions fees paid in another denom. + * + * This field is ignored if the chain didn't enable tips, i.e. didn't add the + * `TipDecorator` in its posthandler. + * + * Since: cosmos-sdk 0.46 + */ + tip: Tip | undefined; +} + +/** + * SignerInfo describes the public key and signing mode of a single top-level + * signer. + */ +export interface SignerInfo { + /** + * public_key is the public key of the signer. It is optional for accounts + * that already exist in state. If unset, the verifier can use the required \ + * signer address for this position and lookup the public key. + */ + publicKey: + | Any + | undefined; + /** + * mode_info describes the signing mode of the signer and is a nested + * structure to support nested multisig pubkey's + */ + modeInfo: + | ModeInfo + | undefined; + /** + * sequence is the sequence of the account, which describes the + * number of committed transactions signed by a given address. It is used to + * prevent replay attacks. + */ + sequence: number; +} + +/** ModeInfo describes the signing mode of a single or nested multisig signer. 
*/ +export interface ModeInfo { + /** single represents a single signer */ + single: + | ModeInfo_Single + | undefined; + /** multi represents a nested multisig signer */ + multi: ModeInfo_Multi | undefined; +} + +/** + * Single is the mode info for a single signer. It is structured as a message + * to allow for additional fields such as locale for SIGN_MODE_TEXTUAL in the + * future + */ +export interface ModeInfo_Single { + /** mode is the signing mode of the single signer */ + mode: SignMode; +} + +/** Multi is the mode info for a multisig public key */ +export interface ModeInfo_Multi { + /** bitarray specifies which keys within the multisig are signing */ + bitarray: + | CompactBitArray + | undefined; + /** + * mode_infos is the corresponding modes of the signers of the multisig + * which could include nested multisig public keys + */ + modeInfos: ModeInfo[]; +} + +/** + * Fee includes the amount of coins paid in fees and the maximum + * gas to be used by the transaction. The ratio yields an effective "gasprice", + * which must be above some miminum to be accepted into the mempool. + */ +export interface Fee { + /** amount is the amount of coins to be paid as a fee */ + amount: Coin[]; + /** + * gas_limit is the maximum gas that can be used in transaction processing + * before an out of gas error occurs + */ + gasLimit: number; + /** + * if unset, the first signer is responsible for paying the fees. If set, the specified account must pay the fees. + * the payer must be a tx signer (and thus have signed this field in AuthInfo). + * setting this field does *not* change the ordering of required signers for the transaction. + */ + payer: string; + /** + * if set, the fee payer (either the first signer or the value of the payer field) requests that a fee grant be used + * to pay fees instead of the fee payer's own balance. 
If an appropriate fee grant does not exist or the chain does + * not support fee grants, this will fail + */ + granter: string; +} + +/** + * Tip is the tip used for meta-transactions. + * + * Since: cosmos-sdk 0.46 + */ +export interface Tip { + /** amount is the amount of the tip */ + amount: Coin[]; + /** tipper is the address of the account paying for the tip */ + tipper: string; +} + +/** + * AuxSignerData is the intermediary format that an auxiliary signer (e.g. a + * tipper) builds and sends to the fee payer (who will build and broadcast the + * actual tx). AuxSignerData is not a valid tx in itself, and will be rejected + * by the node if sent directly as-is. + * + * Since: cosmos-sdk 0.46 + */ +export interface AuxSignerData { + /** + * address is the bech32-encoded address of the auxiliary signer. If using + * AuxSignerData across different chains, the bech32 prefix of the target + * chain (where the final transaction is broadcasted) should be used. + */ + address: string; + /** + * sign_doc is the SIGN_MODE_DIRECT_AUX sign doc that the auxiliary signer + * signs. Note: we use the same sign doc even if we're signing with + * LEGACY_AMINO_JSON. + */ + signDoc: + | SignDocDirectAux + | undefined; + /** mode is the signing mode of the single signer. */ + mode: SignMode; + /** sig is the signature of the sign doc. 
*/ + sig: Uint8Array; +} + +function createBaseTx(): Tx { + return { body: undefined, authInfo: undefined, signatures: [] }; +} + +export const Tx = { + encode(message: Tx, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.body !== undefined) { + TxBody.encode(message.body, writer.uint32(10).fork()).ldelim(); + } + if (message.authInfo !== undefined) { + AuthInfo.encode(message.authInfo, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.signatures) { + writer.uint32(26).bytes(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Tx { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTx(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.body = TxBody.decode(reader, reader.uint32()); + break; + case 2: + message.authInfo = AuthInfo.decode(reader, reader.uint32()); + break; + case 3: + message.signatures.push(reader.bytes()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Tx { + return { + body: isSet(object.body) ? TxBody.fromJSON(object.body) : undefined, + authInfo: isSet(object.authInfo) ? AuthInfo.fromJSON(object.authInfo) : undefined, + signatures: Array.isArray(object?.signatures) ? object.signatures.map((e: any) => bytesFromBase64(e)) : [], + }; + }, + + toJSON(message: Tx): unknown { + const obj: any = {}; + message.body !== undefined && (obj.body = message.body ? TxBody.toJSON(message.body) : undefined); + message.authInfo !== undefined && (obj.authInfo = message.authInfo ? AuthInfo.toJSON(message.authInfo) : undefined); + if (message.signatures) { + obj.signatures = message.signatures.map((e) => base64FromBytes(e !== undefined ? 
e : new Uint8Array())); + } else { + obj.signatures = []; + } + return obj; + }, + + fromPartial, I>>(object: I): Tx { + const message = createBaseTx(); + message.body = (object.body !== undefined && object.body !== null) ? TxBody.fromPartial(object.body) : undefined; + message.authInfo = (object.authInfo !== undefined && object.authInfo !== null) + ? AuthInfo.fromPartial(object.authInfo) + : undefined; + message.signatures = object.signatures?.map((e) => e) || []; + return message; + }, +}; + +function createBaseTxRaw(): TxRaw { + return { bodyBytes: new Uint8Array(), authInfoBytes: new Uint8Array(), signatures: [] }; +} + +export const TxRaw = { + encode(message: TxRaw, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.bodyBytes.length !== 0) { + writer.uint32(10).bytes(message.bodyBytes); + } + if (message.authInfoBytes.length !== 0) { + writer.uint32(18).bytes(message.authInfoBytes); + } + for (const v of message.signatures) { + writer.uint32(26).bytes(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TxRaw { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTxRaw(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.bodyBytes = reader.bytes(); + break; + case 2: + message.authInfoBytes = reader.bytes(); + break; + case 3: + message.signatures.push(reader.bytes()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): TxRaw { + return { + bodyBytes: isSet(object.bodyBytes) ? bytesFromBase64(object.bodyBytes) : new Uint8Array(), + authInfoBytes: isSet(object.authInfoBytes) ? bytesFromBase64(object.authInfoBytes) : new Uint8Array(), + signatures: Array.isArray(object?.signatures) ? 
object.signatures.map((e: any) => bytesFromBase64(e)) : [], + }; + }, + + toJSON(message: TxRaw): unknown { + const obj: any = {}; + message.bodyBytes !== undefined + && (obj.bodyBytes = base64FromBytes(message.bodyBytes !== undefined ? message.bodyBytes : new Uint8Array())); + message.authInfoBytes !== undefined + && (obj.authInfoBytes = base64FromBytes( + message.authInfoBytes !== undefined ? message.authInfoBytes : new Uint8Array(), + )); + if (message.signatures) { + obj.signatures = message.signatures.map((e) => base64FromBytes(e !== undefined ? e : new Uint8Array())); + } else { + obj.signatures = []; + } + return obj; + }, + + fromPartial, I>>(object: I): TxRaw { + const message = createBaseTxRaw(); + message.bodyBytes = object.bodyBytes ?? new Uint8Array(); + message.authInfoBytes = object.authInfoBytes ?? new Uint8Array(); + message.signatures = object.signatures?.map((e) => e) || []; + return message; + }, +}; + +function createBaseSignDoc(): SignDoc { + return { bodyBytes: new Uint8Array(), authInfoBytes: new Uint8Array(), chainId: "", accountNumber: 0 }; +} + +export const SignDoc = { + encode(message: SignDoc, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.bodyBytes.length !== 0) { + writer.uint32(10).bytes(message.bodyBytes); + } + if (message.authInfoBytes.length !== 0) { + writer.uint32(18).bytes(message.authInfoBytes); + } + if (message.chainId !== "") { + writer.uint32(26).string(message.chainId); + } + if (message.accountNumber !== 0) { + writer.uint32(32).uint64(message.accountNumber); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SignDoc { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSignDoc(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.bodyBytes = reader.bytes(); + break; + case 2: + message.authInfoBytes = reader.bytes(); + break; + case 3: + message.chainId = reader.string(); + break; + case 4: + message.accountNumber = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SignDoc { + return { + bodyBytes: isSet(object.bodyBytes) ? bytesFromBase64(object.bodyBytes) : new Uint8Array(), + authInfoBytes: isSet(object.authInfoBytes) ? bytesFromBase64(object.authInfoBytes) : new Uint8Array(), + chainId: isSet(object.chainId) ? String(object.chainId) : "", + accountNumber: isSet(object.accountNumber) ? Number(object.accountNumber) : 0, + }; + }, + + toJSON(message: SignDoc): unknown { + const obj: any = {}; + message.bodyBytes !== undefined + && (obj.bodyBytes = base64FromBytes(message.bodyBytes !== undefined ? message.bodyBytes : new Uint8Array())); + message.authInfoBytes !== undefined + && (obj.authInfoBytes = base64FromBytes( + message.authInfoBytes !== undefined ? message.authInfoBytes : new Uint8Array(), + )); + message.chainId !== undefined && (obj.chainId = message.chainId); + message.accountNumber !== undefined && (obj.accountNumber = Math.round(message.accountNumber)); + return obj; + }, + + fromPartial, I>>(object: I): SignDoc { + const message = createBaseSignDoc(); + message.bodyBytes = object.bodyBytes ?? new Uint8Array(); + message.authInfoBytes = object.authInfoBytes ?? new Uint8Array(); + message.chainId = object.chainId ?? ""; + message.accountNumber = object.accountNumber ?? 
0; + return message; + }, +}; + +function createBaseSignDocDirectAux(): SignDocDirectAux { + return { + bodyBytes: new Uint8Array(), + publicKey: undefined, + chainId: "", + accountNumber: 0, + sequence: 0, + tip: undefined, + }; +} + +export const SignDocDirectAux = { + encode(message: SignDocDirectAux, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.bodyBytes.length !== 0) { + writer.uint32(10).bytes(message.bodyBytes); + } + if (message.publicKey !== undefined) { + Any.encode(message.publicKey, writer.uint32(18).fork()).ldelim(); + } + if (message.chainId !== "") { + writer.uint32(26).string(message.chainId); + } + if (message.accountNumber !== 0) { + writer.uint32(32).uint64(message.accountNumber); + } + if (message.sequence !== 0) { + writer.uint32(40).uint64(message.sequence); + } + if (message.tip !== undefined) { + Tip.encode(message.tip, writer.uint32(50).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SignDocDirectAux { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSignDocDirectAux(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.bodyBytes = reader.bytes(); + break; + case 2: + message.publicKey = Any.decode(reader, reader.uint32()); + break; + case 3: + message.chainId = reader.string(); + break; + case 4: + message.accountNumber = longToNumber(reader.uint64() as Long); + break; + case 5: + message.sequence = longToNumber(reader.uint64() as Long); + break; + case 6: + message.tip = Tip.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SignDocDirectAux { + return { + bodyBytes: isSet(object.bodyBytes) ? bytesFromBase64(object.bodyBytes) : new Uint8Array(), + publicKey: isSet(object.publicKey) ? 
Any.fromJSON(object.publicKey) : undefined, + chainId: isSet(object.chainId) ? String(object.chainId) : "", + accountNumber: isSet(object.accountNumber) ? Number(object.accountNumber) : 0, + sequence: isSet(object.sequence) ? Number(object.sequence) : 0, + tip: isSet(object.tip) ? Tip.fromJSON(object.tip) : undefined, + }; + }, + + toJSON(message: SignDocDirectAux): unknown { + const obj: any = {}; + message.bodyBytes !== undefined + && (obj.bodyBytes = base64FromBytes(message.bodyBytes !== undefined ? message.bodyBytes : new Uint8Array())); + message.publicKey !== undefined && (obj.publicKey = message.publicKey ? Any.toJSON(message.publicKey) : undefined); + message.chainId !== undefined && (obj.chainId = message.chainId); + message.accountNumber !== undefined && (obj.accountNumber = Math.round(message.accountNumber)); + message.sequence !== undefined && (obj.sequence = Math.round(message.sequence)); + message.tip !== undefined && (obj.tip = message.tip ? Tip.toJSON(message.tip) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): SignDocDirectAux { + const message = createBaseSignDocDirectAux(); + message.bodyBytes = object.bodyBytes ?? new Uint8Array(); + message.publicKey = (object.publicKey !== undefined && object.publicKey !== null) + ? Any.fromPartial(object.publicKey) + : undefined; + message.chainId = object.chainId ?? ""; + message.accountNumber = object.accountNumber ?? 0; + message.sequence = object.sequence ?? 0; + message.tip = (object.tip !== undefined && object.tip !== null) ? 
Tip.fromPartial(object.tip) : undefined; + return message; + }, +}; + +function createBaseTxBody(): TxBody { + return { messages: [], memo: "", timeoutHeight: 0, extensionOptions: [], nonCriticalExtensionOptions: [] }; +} + +export const TxBody = { + encode(message: TxBody, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.messages) { + Any.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.memo !== "") { + writer.uint32(18).string(message.memo); + } + if (message.timeoutHeight !== 0) { + writer.uint32(24).uint64(message.timeoutHeight); + } + for (const v of message.extensionOptions) { + Any.encode(v!, writer.uint32(8186).fork()).ldelim(); + } + for (const v of message.nonCriticalExtensionOptions) { + Any.encode(v!, writer.uint32(16378).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TxBody { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTxBody(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.messages.push(Any.decode(reader, reader.uint32())); + break; + case 2: + message.memo = reader.string(); + break; + case 3: + message.timeoutHeight = longToNumber(reader.uint64() as Long); + break; + case 1023: + message.extensionOptions.push(Any.decode(reader, reader.uint32())); + break; + case 2047: + message.nonCriticalExtensionOptions.push(Any.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): TxBody { + return { + messages: Array.isArray(object?.messages) ? object.messages.map((e: any) => Any.fromJSON(e)) : [], + memo: isSet(object.memo) ? String(object.memo) : "", + timeoutHeight: isSet(object.timeoutHeight) ? 
Number(object.timeoutHeight) : 0, + extensionOptions: Array.isArray(object?.extensionOptions) + ? object.extensionOptions.map((e: any) => Any.fromJSON(e)) + : [], + nonCriticalExtensionOptions: Array.isArray(object?.nonCriticalExtensionOptions) + ? object.nonCriticalExtensionOptions.map((e: any) => Any.fromJSON(e)) + : [], + }; + }, + + toJSON(message: TxBody): unknown { + const obj: any = {}; + if (message.messages) { + obj.messages = message.messages.map((e) => e ? Any.toJSON(e) : undefined); + } else { + obj.messages = []; + } + message.memo !== undefined && (obj.memo = message.memo); + message.timeoutHeight !== undefined && (obj.timeoutHeight = Math.round(message.timeoutHeight)); + if (message.extensionOptions) { + obj.extensionOptions = message.extensionOptions.map((e) => e ? Any.toJSON(e) : undefined); + } else { + obj.extensionOptions = []; + } + if (message.nonCriticalExtensionOptions) { + obj.nonCriticalExtensionOptions = message.nonCriticalExtensionOptions.map((e) => e ? Any.toJSON(e) : undefined); + } else { + obj.nonCriticalExtensionOptions = []; + } + return obj; + }, + + fromPartial, I>>(object: I): TxBody { + const message = createBaseTxBody(); + message.messages = object.messages?.map((e) => Any.fromPartial(e)) || []; + message.memo = object.memo ?? ""; + message.timeoutHeight = object.timeoutHeight ?? 
0; + message.extensionOptions = object.extensionOptions?.map((e) => Any.fromPartial(e)) || []; + message.nonCriticalExtensionOptions = object.nonCriticalExtensionOptions?.map((e) => Any.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseAuthInfo(): AuthInfo { + return { signerInfos: [], fee: undefined, tip: undefined }; +} + +export const AuthInfo = { + encode(message: AuthInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.signerInfos) { + SignerInfo.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.fee !== undefined) { + Fee.encode(message.fee, writer.uint32(18).fork()).ldelim(); + } + if (message.tip !== undefined) { + Tip.encode(message.tip, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AuthInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAuthInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signerInfos.push(SignerInfo.decode(reader, reader.uint32())); + break; + case 2: + message.fee = Fee.decode(reader, reader.uint32()); + break; + case 3: + message.tip = Tip.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): AuthInfo { + return { + signerInfos: Array.isArray(object?.signerInfos) ? object.signerInfos.map((e: any) => SignerInfo.fromJSON(e)) : [], + fee: isSet(object.fee) ? Fee.fromJSON(object.fee) : undefined, + tip: isSet(object.tip) ? Tip.fromJSON(object.tip) : undefined, + }; + }, + + toJSON(message: AuthInfo): unknown { + const obj: any = {}; + if (message.signerInfos) { + obj.signerInfos = message.signerInfos.map((e) => e ? 
SignerInfo.toJSON(e) : undefined); + } else { + obj.signerInfos = []; + } + message.fee !== undefined && (obj.fee = message.fee ? Fee.toJSON(message.fee) : undefined); + message.tip !== undefined && (obj.tip = message.tip ? Tip.toJSON(message.tip) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): AuthInfo { + const message = createBaseAuthInfo(); + message.signerInfos = object.signerInfos?.map((e) => SignerInfo.fromPartial(e)) || []; + message.fee = (object.fee !== undefined && object.fee !== null) ? Fee.fromPartial(object.fee) : undefined; + message.tip = (object.tip !== undefined && object.tip !== null) ? Tip.fromPartial(object.tip) : undefined; + return message; + }, +}; + +function createBaseSignerInfo(): SignerInfo { + return { publicKey: undefined, modeInfo: undefined, sequence: 0 }; +} + +export const SignerInfo = { + encode(message: SignerInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.publicKey !== undefined) { + Any.encode(message.publicKey, writer.uint32(10).fork()).ldelim(); + } + if (message.modeInfo !== undefined) { + ModeInfo.encode(message.modeInfo, writer.uint32(18).fork()).ldelim(); + } + if (message.sequence !== 0) { + writer.uint32(24).uint64(message.sequence); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SignerInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSignerInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.publicKey = Any.decode(reader, reader.uint32()); + break; + case 2: + message.modeInfo = ModeInfo.decode(reader, reader.uint32()); + break; + case 3: + message.sequence = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SignerInfo { + return { + publicKey: isSet(object.publicKey) ? Any.fromJSON(object.publicKey) : undefined, + modeInfo: isSet(object.modeInfo) ? ModeInfo.fromJSON(object.modeInfo) : undefined, + sequence: isSet(object.sequence) ? Number(object.sequence) : 0, + }; + }, + + toJSON(message: SignerInfo): unknown { + const obj: any = {}; + message.publicKey !== undefined && (obj.publicKey = message.publicKey ? Any.toJSON(message.publicKey) : undefined); + message.modeInfo !== undefined && (obj.modeInfo = message.modeInfo ? ModeInfo.toJSON(message.modeInfo) : undefined); + message.sequence !== undefined && (obj.sequence = Math.round(message.sequence)); + return obj; + }, + + fromPartial, I>>(object: I): SignerInfo { + const message = createBaseSignerInfo(); + message.publicKey = (object.publicKey !== undefined && object.publicKey !== null) + ? Any.fromPartial(object.publicKey) + : undefined; + message.modeInfo = (object.modeInfo !== undefined && object.modeInfo !== null) + ? ModeInfo.fromPartial(object.modeInfo) + : undefined; + message.sequence = object.sequence ?? 
0; + return message; + }, +}; + +function createBaseModeInfo(): ModeInfo { + return { single: undefined, multi: undefined }; +} + +export const ModeInfo = { + encode(message: ModeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.single !== undefined) { + ModeInfo_Single.encode(message.single, writer.uint32(10).fork()).ldelim(); + } + if (message.multi !== undefined) { + ModeInfo_Multi.encode(message.multi, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ModeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseModeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.single = ModeInfo_Single.decode(reader, reader.uint32()); + break; + case 2: + message.multi = ModeInfo_Multi.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ModeInfo { + return { + single: isSet(object.single) ? ModeInfo_Single.fromJSON(object.single) : undefined, + multi: isSet(object.multi) ? ModeInfo_Multi.fromJSON(object.multi) : undefined, + }; + }, + + toJSON(message: ModeInfo): unknown { + const obj: any = {}; + message.single !== undefined && (obj.single = message.single ? ModeInfo_Single.toJSON(message.single) : undefined); + message.multi !== undefined && (obj.multi = message.multi ? ModeInfo_Multi.toJSON(message.multi) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): ModeInfo { + const message = createBaseModeInfo(); + message.single = (object.single !== undefined && object.single !== null) + ? ModeInfo_Single.fromPartial(object.single) + : undefined; + message.multi = (object.multi !== undefined && object.multi !== null) + ? 
ModeInfo_Multi.fromPartial(object.multi) + : undefined; + return message; + }, +}; + +function createBaseModeInfo_Single(): ModeInfo_Single { + return { mode: 0 }; +} + +export const ModeInfo_Single = { + encode(message: ModeInfo_Single, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.mode !== 0) { + writer.uint32(8).int32(message.mode); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ModeInfo_Single { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseModeInfo_Single(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.mode = reader.int32() as any; + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ModeInfo_Single { + return { mode: isSet(object.mode) ? signModeFromJSON(object.mode) : 0 }; + }, + + toJSON(message: ModeInfo_Single): unknown { + const obj: any = {}; + message.mode !== undefined && (obj.mode = signModeToJSON(message.mode)); + return obj; + }, + + fromPartial, I>>(object: I): ModeInfo_Single { + const message = createBaseModeInfo_Single(); + message.mode = object.mode ?? 0; + return message; + }, +}; + +function createBaseModeInfo_Multi(): ModeInfo_Multi { + return { bitarray: undefined, modeInfos: [] }; +} + +export const ModeInfo_Multi = { + encode(message: ModeInfo_Multi, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.bitarray !== undefined) { + CompactBitArray.encode(message.bitarray, writer.uint32(10).fork()).ldelim(); + } + for (const v of message.modeInfos) { + ModeInfo.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ModeInfo_Multi { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseModeInfo_Multi(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.bitarray = CompactBitArray.decode(reader, reader.uint32()); + break; + case 2: + message.modeInfos.push(ModeInfo.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ModeInfo_Multi { + return { + bitarray: isSet(object.bitarray) ? CompactBitArray.fromJSON(object.bitarray) : undefined, + modeInfos: Array.isArray(object?.modeInfos) ? object.modeInfos.map((e: any) => ModeInfo.fromJSON(e)) : [], + }; + }, + + toJSON(message: ModeInfo_Multi): unknown { + const obj: any = {}; + message.bitarray !== undefined + && (obj.bitarray = message.bitarray ? CompactBitArray.toJSON(message.bitarray) : undefined); + if (message.modeInfos) { + obj.modeInfos = message.modeInfos.map((e) => e ? ModeInfo.toJSON(e) : undefined); + } else { + obj.modeInfos = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ModeInfo_Multi { + const message = createBaseModeInfo_Multi(); + message.bitarray = (object.bitarray !== undefined && object.bitarray !== null) + ? 
CompactBitArray.fromPartial(object.bitarray) + : undefined; + message.modeInfos = object.modeInfos?.map((e) => ModeInfo.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFee(): Fee { + return { amount: [], gasLimit: 0, payer: "", granter: "" }; +} + +export const Fee = { + encode(message: Fee, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.amount) { + Coin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.gasLimit !== 0) { + writer.uint32(16).uint64(message.gasLimit); + } + if (message.payer !== "") { + writer.uint32(26).string(message.payer); + } + if (message.granter !== "") { + writer.uint32(34).string(message.granter); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Fee { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFee(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.amount.push(Coin.decode(reader, reader.uint32())); + break; + case 2: + message.gasLimit = longToNumber(reader.uint64() as Long); + break; + case 3: + message.payer = reader.string(); + break; + case 4: + message.granter = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Fee { + return { + amount: Array.isArray(object?.amount) ? object.amount.map((e: any) => Coin.fromJSON(e)) : [], + gasLimit: isSet(object.gasLimit) ? Number(object.gasLimit) : 0, + payer: isSet(object.payer) ? String(object.payer) : "", + granter: isSet(object.granter) ? String(object.granter) : "", + }; + }, + + toJSON(message: Fee): unknown { + const obj: any = {}; + if (message.amount) { + obj.amount = message.amount.map((e) => e ? 
Coin.toJSON(e) : undefined); + } else { + obj.amount = []; + } + message.gasLimit !== undefined && (obj.gasLimit = Math.round(message.gasLimit)); + message.payer !== undefined && (obj.payer = message.payer); + message.granter !== undefined && (obj.granter = message.granter); + return obj; + }, + + fromPartial, I>>(object: I): Fee { + const message = createBaseFee(); + message.amount = object.amount?.map((e) => Coin.fromPartial(e)) || []; + message.gasLimit = object.gasLimit ?? 0; + message.payer = object.payer ?? ""; + message.granter = object.granter ?? ""; + return message; + }, +}; + +function createBaseTip(): Tip { + return { amount: [], tipper: "" }; +} + +export const Tip = { + encode(message: Tip, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.amount) { + Coin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.tipper !== "") { + writer.uint32(18).string(message.tipper); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Tip { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTip(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.amount.push(Coin.decode(reader, reader.uint32())); + break; + case 2: + message.tipper = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Tip { + return { + amount: Array.isArray(object?.amount) ? object.amount.map((e: any) => Coin.fromJSON(e)) : [], + tipper: isSet(object.tipper) ? String(object.tipper) : "", + }; + }, + + toJSON(message: Tip): unknown { + const obj: any = {}; + if (message.amount) { + obj.amount = message.amount.map((e) => e ? 
Coin.toJSON(e) : undefined); + } else { + obj.amount = []; + } + message.tipper !== undefined && (obj.tipper = message.tipper); + return obj; + }, + + fromPartial, I>>(object: I): Tip { + const message = createBaseTip(); + message.amount = object.amount?.map((e) => Coin.fromPartial(e)) || []; + message.tipper = object.tipper ?? ""; + return message; + }, +}; + +function createBaseAuxSignerData(): AuxSignerData { + return { address: "", signDoc: undefined, mode: 0, sig: new Uint8Array() }; +} + +export const AuxSignerData = { + encode(message: AuxSignerData, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + if (message.signDoc !== undefined) { + SignDocDirectAux.encode(message.signDoc, writer.uint32(18).fork()).ldelim(); + } + if (message.mode !== 0) { + writer.uint32(24).int32(message.mode); + } + if (message.sig.length !== 0) { + writer.uint32(34).bytes(message.sig); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AuxSignerData { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAuxSignerData(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + case 2: + message.signDoc = SignDocDirectAux.decode(reader, reader.uint32()); + break; + case 3: + message.mode = reader.int32() as any; + break; + case 4: + message.sig = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): AuxSignerData { + return { + address: isSet(object.address) ? String(object.address) : "", + signDoc: isSet(object.signDoc) ? SignDocDirectAux.fromJSON(object.signDoc) : undefined, + mode: isSet(object.mode) ? signModeFromJSON(object.mode) : 0, + sig: isSet(object.sig) ? 
bytesFromBase64(object.sig) : new Uint8Array(), + }; + }, + + toJSON(message: AuxSignerData): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + message.signDoc !== undefined + && (obj.signDoc = message.signDoc ? SignDocDirectAux.toJSON(message.signDoc) : undefined); + message.mode !== undefined && (obj.mode = signModeToJSON(message.mode)); + message.sig !== undefined + && (obj.sig = base64FromBytes(message.sig !== undefined ? message.sig : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): AuxSignerData { + const message = createBaseAuxSignerData(); + message.address = object.address ?? ""; + message.signDoc = (object.signDoc !== undefined && object.signDoc !== null) + ? SignDocDirectAux.fromPartial(object.signDoc) + : undefined; + message.mode = object.mode ?? 0; + message.sig = object.sig ?? new Uint8Array(); + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type 
Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.tx.v1beta1/types/cosmos_proto/cosmos.ts b/ts-client/cosmos.tx.v1beta1/types/cosmos_proto/cosmos.ts new file mode 100644 index 000000000..79c8d3486 --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/cosmos_proto/cosmos.ts @@ -0,0 +1,247 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos_proto"; + +export enum ScalarType { + SCALAR_TYPE_UNSPECIFIED = 0, + SCALAR_TYPE_STRING = 1, + SCALAR_TYPE_BYTES = 2, + UNRECOGNIZED = -1, +} + +export function scalarTypeFromJSON(object: any): ScalarType { + switch (object) { + case 0: + case "SCALAR_TYPE_UNSPECIFIED": + return ScalarType.SCALAR_TYPE_UNSPECIFIED; + case 1: + case "SCALAR_TYPE_STRING": + return ScalarType.SCALAR_TYPE_STRING; + case 2: + case "SCALAR_TYPE_BYTES": + return ScalarType.SCALAR_TYPE_BYTES; + case -1: + case "UNRECOGNIZED": + default: + return ScalarType.UNRECOGNIZED; + } +} + +export function scalarTypeToJSON(object: ScalarType): string { + switch (object) { + case ScalarType.SCALAR_TYPE_UNSPECIFIED: + return "SCALAR_TYPE_UNSPECIFIED"; + case ScalarType.SCALAR_TYPE_STRING: + return "SCALAR_TYPE_STRING"; + case 
ScalarType.SCALAR_TYPE_BYTES: + return "SCALAR_TYPE_BYTES"; + case ScalarType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * InterfaceDescriptor describes an interface type to be used with + * accepts_interface and implements_interface and declared by declare_interface. + */ +export interface InterfaceDescriptor { + /** + * name is the name of the interface. It should be a short-name (without + * a period) such that the fully qualified name of the interface will be + * package.name, ex. for the package a.b and interface named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the interface and its + * purpose. + */ + description: string; +} + +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. + * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. + * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ +export interface ScalarDescriptor { + /** + * name is the name of the scalar. It should be a short-name (without + * a period) such that the fully qualified name of the scalar will be + * package.name, ex. for the package a.b and scalar named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the scalar and its + * encoding format. For instance a big integer or decimal scalar should + * specify precisely the expected encoding format. + */ + description: string; + /** + * field_type is the type of field with which this scalar can be used. + * Scalars can be used with one and only one type of field so that + * encoding standards and simple and clear. Currently only string and + * bytes fields are supported for scalars. 
+ */ + fieldType: ScalarType[]; +} + +function createBaseInterfaceDescriptor(): InterfaceDescriptor { + return { name: "", description: "" }; +} + +export const InterfaceDescriptor = { + encode(message: InterfaceDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseInterfaceDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): InterfaceDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + }; + }, + + toJSON(message: InterfaceDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + return obj; + }, + + fromPartial, I>>(object: I): InterfaceDescriptor { + const message = createBaseInterfaceDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? 
""; + return message; + }, +}; + +function createBaseScalarDescriptor(): ScalarDescriptor { + return { name: "", description: "", fieldType: [] }; +} + +export const ScalarDescriptor = { + encode(message: ScalarDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + writer.uint32(26).fork(); + for (const v of message.fieldType) { + writer.int32(v); + } + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ScalarDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseScalarDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.fieldType.push(reader.int32() as any); + } + } else { + message.fieldType.push(reader.int32() as any); + } + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ScalarDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + fieldType: Array.isArray(object?.fieldType) ? 
object.fieldType.map((e: any) => scalarTypeFromJSON(e)) : [], + }; + }, + + toJSON(message: ScalarDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + if (message.fieldType) { + obj.fieldType = message.fieldType.map((e) => scalarTypeToJSON(e)); + } else { + obj.fieldType = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ScalarDescriptor { + const message = createBaseScalarDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? ""; + message.fieldType = object.fieldType?.map((e) => e) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.tx.v1beta1/types/gogoproto/gogo.ts b/ts-client/cosmos.tx.v1beta1/types/gogoproto/gogo.ts new file mode 100644 index 000000000..3f41a047a --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/gogoproto/gogo.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "gogoproto"; diff --git a/ts-client/cosmos.tx.v1beta1/types/google/api/annotations.ts b/ts-client/cosmos.tx.v1beta1/types/google/api/annotations.ts new file mode 100644 index 000000000..aace4787f --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/google/api/annotations.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "google.api"; diff --git a/ts-client/cosmos.tx.v1beta1/types/google/api/http.ts b/ts-client/cosmos.tx.v1beta1/types/google/api/http.ts new file mode 100644 index 000000000..17e770d15 --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/google/api/http.ts @@ -0,0 +1,589 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.api"; + +/** + * Defines the HTTP configuration for an API service. It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface Http { + /** + * A list of HTTP configuration rules that apply to individual API methods. + * + * **NOTE:** All service configuration rules follow "last one wins" order. + */ + rules: HttpRule[]; + /** + * When set to true, URL path parmeters will be fully URI-decoded except in + * cases of single segment matches in reserved expansion, where "%2F" will be + * left encoded. + * + * The default behavior is to not decode RFC 6570 reserved characters in multi + * segment matches. 
+ */ + fullyDecodeReservedExpansion: boolean; +} + +/** + * `HttpRule` defines the mapping of an RPC method to one or more HTTP + * REST API methods. The mapping specifies how different portions of the RPC + * request message are mapped to URL path, URL query parameters, and + * HTTP request body. The mapping is typically specified as an + * `google.api.http` annotation on the RPC method, + * see "google/api/annotations.proto" for details. + * + * The mapping consists of a field specifying the path template and + * method kind. The path template can refer to fields in the request + * message, as in the example below which describes a REST GET + * operation on a resource collection of messages: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}/{sub.subfield}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * SubMessage sub = 2; // `sub.subfield` is url-mapped + * } + * message Message { + * string text = 1; // content of the resource + * } + * + * The same http annotation can alternatively be expressed inside the + * `GRPC API Configuration` YAML file. + * + * http: + * rules: + * - selector: .Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * This definition enables an automatic, bidrectional mapping of HTTP + * JSON to RPC. Example: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456/foo` | `GetMessage(message_id: "123456" sub: SubMessage(subfield: "foo"))` + * + * In general, not only fields but also field paths can be referenced + * from a path pattern. Fields mapped to the path pattern cannot be + * repeated and must have a primitive (non-message) type. + * + * Any fields in the request message which are not bound by the path + * pattern automatically become (optional) HTTP query + * parameters. 
Assume the following definition of the request message: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * int64 revision = 2; // becomes a parameter + * SubMessage sub = 3; // `sub.subfield` becomes a parameter + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456?revision=2&sub.subfield=foo` | `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: "foo"))` + * + * Note that fields which are mapped to HTTP parameters must have a + * primitive type or a repeated primitive type. Message types are not + * allowed. In the case of a repeated type, the parameter can be + * repeated in the URL, as in `...?param=A¶m=B`. + * + * For HTTP method kinds which allow a request body, the `body` field + * specifies the mapping. Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. 
This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * The following HTTP JSON to RPC mapping is enabled: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice of + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC + * mappings: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` + * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: "123456")` + * + * # Rules for HTTP mapping + * + * The rules for mapping HTTP path, query parameters, and body fields + * to the request message are as follows: + * + * 1. The `body` field specifies either `*` or a field path, or is + * omitted. If omitted, it indicates there is no HTTP request body. + * 2. 
Leaf fields (recursive expansion of nested messages in the + * request) can be classified into three types: + * (a) Matched in the URL template. + * (b) Covered by body (if body is `*`, everything except (a) fields; + * else everything under the body field) + * (c) All other fields. + * 3. URL query parameters found in the HTTP request are mapped to (c) fields. + * 4. Any body sent with an HTTP request can contain only (b) fields. + * + * The syntax of the path template is as follows: + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single path segment. The syntax `**` matches zero + * or more path segments, which must be the last part of the path except the + * `Verb`. The syntax `LITERAL` matches literal text in the path. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path, all characters + * except `[-_.~0-9a-zA-Z]` are percent-encoded. Such variables show up in the + * Discovery Document as `{var}`. + * + * If a variable contains one or more path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path, all + * characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. Such variables + * show up in the Discovery Document as `{+var}`. 
+ * + * NOTE: While the single segment variable matches the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 + * Simple String Expansion, the multi segment variable **does not** match + * RFC 6570 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. + * + * NOTE: the field paths in variables and in the `body` must not refer to + * repeated fields or map fields. + */ +export interface HttpRule { + /** + * Selects methods to which this rule applies. + * + * Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + */ + selector: string; + /** Used for listing and getting information about resources. */ + get: + | string + | undefined; + /** Used for updating a resource. */ + put: + | string + | undefined; + /** Used for creating a resource. */ + post: + | string + | undefined; + /** Used for deleting a resource. */ + delete: + | string + | undefined; + /** Used for updating a resource. */ + patch: + | string + | undefined; + /** + * The custom pattern is used for specifying an HTTP method that is not + * included in the `pattern` field, such as HEAD, or "*" to leave the + * HTTP method unspecified for this rule. The wild-card rule is useful + * for services that provide content to Web (HTML) clients. + */ + custom: + | CustomHttpPattern + | undefined; + /** + * The name of the request field whose value is mapped to the HTTP body, or + * `*` for mapping all fields not captured by the path pattern to the HTTP + * body. NOTE: the referred field must not be a repeated field and must be + * present at the top-level of request message type. + */ + body: string; + /** + * Optional. The name of the response field whose value is mapped to the HTTP + * body of response. Other response fields are ignored. When + * not set, the response message will be used as HTTP body of response. 
+ */ + responseBody: string; + /** + * Additional HTTP bindings for the selector. Nested bindings must + * not contain an `additional_bindings` field themselves (that is, + * the nesting may only be one level deep). + */ + additionalBindings: HttpRule[]; +} + +/** A custom pattern is used for defining custom HTTP verb. */ +export interface CustomHttpPattern { + /** The name of this custom HTTP verb. */ + kind: string; + /** The path matched by this custom verb. */ + path: string; +} + +function createBaseHttp(): Http { + return { rules: [], fullyDecodeReservedExpansion: false }; +} + +export const Http = { + encode(message: Http, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rules) { + HttpRule.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.fullyDecodeReservedExpansion === true) { + writer.uint32(16).bool(message.fullyDecodeReservedExpansion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Http { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rules.push(HttpRule.decode(reader, reader.uint32())); + break; + case 2: + message.fullyDecodeReservedExpansion = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Http { + return { + rules: Array.isArray(object?.rules) ? object.rules.map((e: any) => HttpRule.fromJSON(e)) : [], + fullyDecodeReservedExpansion: isSet(object.fullyDecodeReservedExpansion) + ? Boolean(object.fullyDecodeReservedExpansion) + : false, + }; + }, + + toJSON(message: Http): unknown { + const obj: any = {}; + if (message.rules) { + obj.rules = message.rules.map((e) => e ? 
HttpRule.toJSON(e) : undefined); + } else { + obj.rules = []; + } + message.fullyDecodeReservedExpansion !== undefined + && (obj.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion); + return obj; + }, + + fromPartial, I>>(object: I): Http { + const message = createBaseHttp(); + message.rules = object.rules?.map((e) => HttpRule.fromPartial(e)) || []; + message.fullyDecodeReservedExpansion = object.fullyDecodeReservedExpansion ?? false; + return message; + }, +}; + +function createBaseHttpRule(): HttpRule { + return { + selector: "", + get: undefined, + put: undefined, + post: undefined, + delete: undefined, + patch: undefined, + custom: undefined, + body: "", + responseBody: "", + additionalBindings: [], + }; +} + +export const HttpRule = { + encode(message: HttpRule, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.selector !== "") { + writer.uint32(10).string(message.selector); + } + if (message.get !== undefined) { + writer.uint32(18).string(message.get); + } + if (message.put !== undefined) { + writer.uint32(26).string(message.put); + } + if (message.post !== undefined) { + writer.uint32(34).string(message.post); + } + if (message.delete !== undefined) { + writer.uint32(42).string(message.delete); + } + if (message.patch !== undefined) { + writer.uint32(50).string(message.patch); + } + if (message.custom !== undefined) { + CustomHttpPattern.encode(message.custom, writer.uint32(66).fork()).ldelim(); + } + if (message.body !== "") { + writer.uint32(58).string(message.body); + } + if (message.responseBody !== "") { + writer.uint32(98).string(message.responseBody); + } + for (const v of message.additionalBindings) { + HttpRule.encode(v!, writer.uint32(90).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): HttpRule { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseHttpRule(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.selector = reader.string(); + break; + case 2: + message.get = reader.string(); + break; + case 3: + message.put = reader.string(); + break; + case 4: + message.post = reader.string(); + break; + case 5: + message.delete = reader.string(); + break; + case 6: + message.patch = reader.string(); + break; + case 8: + message.custom = CustomHttpPattern.decode(reader, reader.uint32()); + break; + case 7: + message.body = reader.string(); + break; + case 12: + message.responseBody = reader.string(); + break; + case 11: + message.additionalBindings.push(HttpRule.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): HttpRule { + return { + selector: isSet(object.selector) ? String(object.selector) : "", + get: isSet(object.get) ? String(object.get) : undefined, + put: isSet(object.put) ? String(object.put) : undefined, + post: isSet(object.post) ? String(object.post) : undefined, + delete: isSet(object.delete) ? String(object.delete) : undefined, + patch: isSet(object.patch) ? String(object.patch) : undefined, + custom: isSet(object.custom) ? CustomHttpPattern.fromJSON(object.custom) : undefined, + body: isSet(object.body) ? String(object.body) : "", + responseBody: isSet(object.responseBody) ? String(object.responseBody) : "", + additionalBindings: Array.isArray(object?.additionalBindings) + ? 
object.additionalBindings.map((e: any) => HttpRule.fromJSON(e)) + : [], + }; + }, + + toJSON(message: HttpRule): unknown { + const obj: any = {}; + message.selector !== undefined && (obj.selector = message.selector); + message.get !== undefined && (obj.get = message.get); + message.put !== undefined && (obj.put = message.put); + message.post !== undefined && (obj.post = message.post); + message.delete !== undefined && (obj.delete = message.delete); + message.patch !== undefined && (obj.patch = message.patch); + message.custom !== undefined + && (obj.custom = message.custom ? CustomHttpPattern.toJSON(message.custom) : undefined); + message.body !== undefined && (obj.body = message.body); + message.responseBody !== undefined && (obj.responseBody = message.responseBody); + if (message.additionalBindings) { + obj.additionalBindings = message.additionalBindings.map((e) => e ? HttpRule.toJSON(e) : undefined); + } else { + obj.additionalBindings = []; + } + return obj; + }, + + fromPartial, I>>(object: I): HttpRule { + const message = createBaseHttpRule(); + message.selector = object.selector ?? ""; + message.get = object.get ?? undefined; + message.put = object.put ?? undefined; + message.post = object.post ?? undefined; + message.delete = object.delete ?? undefined; + message.patch = object.patch ?? undefined; + message.custom = (object.custom !== undefined && object.custom !== null) + ? CustomHttpPattern.fromPartial(object.custom) + : undefined; + message.body = object.body ?? ""; + message.responseBody = object.responseBody ?? 
""; + message.additionalBindings = object.additionalBindings?.map((e) => HttpRule.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseCustomHttpPattern(): CustomHttpPattern { + return { kind: "", path: "" }; +} + +export const CustomHttpPattern = { + encode(message: CustomHttpPattern, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.kind !== "") { + writer.uint32(10).string(message.kind); + } + if (message.path !== "") { + writer.uint32(18).string(message.path); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CustomHttpPattern { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCustomHttpPattern(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.kind = reader.string(); + break; + case 2: + message.path = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): CustomHttpPattern { + return { kind: isSet(object.kind) ? String(object.kind) : "", path: isSet(object.path) ? String(object.path) : "" }; + }, + + toJSON(message: CustomHttpPattern): unknown { + const obj: any = {}; + message.kind !== undefined && (obj.kind = message.kind); + message.path !== undefined && (obj.path = message.path); + return obj; + }, + + fromPartial, I>>(object: I): CustomHttpPattern { + const message = createBaseCustomHttpPattern(); + message.kind = object.kind ?? ""; + message.path = object.path ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? 
keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.tx.v1beta1/types/google/protobuf/any.ts b/ts-client/cosmos.tx.v1beta1/types/google/protobuf/any.ts new file mode 100644 index 000000000..c4ebf38be --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/google/protobuf/any.ts @@ -0,0 +1,240 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * `Any` contains an arbitrary serialized protocol buffer message along with a + * URL that describes the type of the serialized message. + * + * Protobuf library provides support to pack/unpack Any values in the form + * of utility functions or additional generated methods of the Any type. + * + * Example 1: Pack and unpack a message in C++. + * + * Foo foo = ...; + * Any any; + * any.PackFrom(foo); + * ... + * if (any.UnpackTo(&foo)) { + * ... + * } + * + * Example 2: Pack and unpack a message in Java. + * + * Foo foo = ...; + * Any any = Any.pack(foo); + * ... + * if (any.is(Foo.class)) { + * foo = any.unpack(Foo.class); + * } + * + * Example 3: Pack and unpack a message in Python. + * + * foo = Foo(...) + * any = Any() + * any.Pack(foo) + * ... + * if any.Is(Foo.DESCRIPTOR): + * any.Unpack(foo) + * ... + * + * Example 4: Pack and unpack a message in Go + * + * foo := &pb.Foo{...} + * any, err := anypb.New(foo) + * if err != nil { + * ... + * } + * ... + * foo := &pb.Foo{} + * if err := any.UnmarshalTo(foo); err != nil { + * ... + * } + * + * The pack methods provided by protobuf library will by default use + * 'type.googleapis.com/full.type.name' as the type URL and the unpack + * methods only use the fully qualified type name after the last '/' + * in the type URL, for example "foo.bar.com/x/y.z" will yield type + * name "y.z". 
+ * + * JSON + * ==== + * The JSON representation of an `Any` value uses the regular + * representation of the deserialized, embedded message, with an + * additional field `@type` which contains the type URL. Example: + * + * package google.profile; + * message Person { + * string first_name = 1; + * string last_name = 2; + * } + * + * { + * "@type": "type.googleapis.com/google.profile.Person", + * "firstName": , + * "lastName": + * } + * + * If the embedded message type is well-known and has a custom JSON + * representation, that representation will be embedded adding a field + * `value` which holds the custom JSON in addition to the `@type` + * field. Example (for message [google.protobuf.Duration][]): + * + * { + * "@type": "type.googleapis.com/google.protobuf.Duration", + * "value": "1.212s" + * } + */ +export interface Any { + /** + * A URL/resource name that uniquely identifies the type of the serialized + * protocol buffer message. This string must contain at least + * one "/" character. The last segment of the URL's path must represent + * the fully qualified name of the type (as in + * `path/google.protobuf.Duration`). The name should be in a canonical form + * (e.g., leading "." is not accepted). + * + * In practice, teams usually precompile into the binary all types that they + * expect it to use in the context of Any. However, for URLs which use the + * scheme `http`, `https`, or no scheme, one can optionally set up a type + * server that maps type URLs to message definitions as follows: + * + * * If no scheme is provided, `https` is assumed. + * * An HTTP GET on the URL must yield a [google.protobuf.Type][] + * value in binary format, or produce an error. + * * Applications are allowed to cache lookup results based on the + * URL, or have them precompiled into a binary to avoid any + * lookup. Therefore, binary compatibility needs to be preserved + * on changes to types. (Use versioned type names to manage + * breaking changes.) 
+ * + * Note: this functionality is not currently available in the official + * protobuf release, and it is not used for type URLs beginning with + * type.googleapis.com. + * + * Schemes other than `http`, `https` (or the empty scheme) might be + * used with implementation specific semantics. + */ + typeUrl: string; + /** Must be a valid serialized protocol buffer of the above specified type. */ + value: Uint8Array; +} + +function createBaseAny(): Any { + return { typeUrl: "", value: new Uint8Array() }; +} + +export const Any = { + encode(message: Any, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.typeUrl !== "") { + writer.uint32(10).string(message.typeUrl); + } + if (message.value.length !== 0) { + writer.uint32(18).bytes(message.value); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Any { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAny(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.typeUrl = reader.string(); + break; + case 2: + message.value = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Any { + return { + typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : "", + value: isSet(object.value) ? bytesFromBase64(object.value) : new Uint8Array(), + }; + }, + + toJSON(message: Any): unknown { + const obj: any = {}; + message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl); + message.value !== undefined + && (obj.value = base64FromBytes(message.value !== undefined ? message.value : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): Any { + const message = createBaseAny(); + message.typeUrl = object.typeUrl ?? ""; + message.value = object.value ?? 
new Uint8Array(); + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.tx.v1beta1/types/google/protobuf/descriptor.ts b/ts-client/cosmos.tx.v1beta1/types/google/protobuf/descriptor.ts new file mode 100644 index 000000000..8fb7bb24c --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/google/protobuf/descriptor.ts @@ -0,0 +1,3753 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSet { + file: FileDescriptorProto[]; +} + +/** Describes a complete .proto file. */ +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name: string; + /** e.g. "foo", "foo.bar", etc. */ + package: string; + /** Names of files imported by this file. */ + dependency: string[]; + /** Indexes of the public imported files in the dependency list above. */ + publicDependency: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weakDependency: number[]; + /** All top-level definitions in this file. */ + messageType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + service: ServiceDescriptorProto[]; + extension: FieldDescriptorProto[]; + options: + | FileOptions + | undefined; + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. + */ + sourceCodeInfo: + | SourceCodeInfo + | undefined; + /** + * The syntax of the proto file. + * The supported values are "proto2" and "proto3". 
+ */ + syntax: string; +} + +/** Describes a message type. */ +export interface DescriptorProto { + name: string; + field: FieldDescriptorProto[]; + extension: FieldDescriptorProto[]; + nestedType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + extensionRange: DescriptorProto_ExtensionRange[]; + oneofDecl: OneofDescriptorProto[]; + options: MessageOptions | undefined; + reservedRange: DescriptorProto_ReservedRange[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reservedName: string[]; +} + +export interface DescriptorProto_ExtensionRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; + options: ExtensionRangeOptions | undefined; +} + +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; +} + +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Describes a field within a message. */ +export interface FieldDescriptorProto { + name: string; + number: number; + label: FieldDescriptorProto_Label; + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type: FieldDescriptorProto_Type; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). + */ + typeName: string; + /** + * For extensions, this is the name of the type being extended. 
It is + * resolved in the same manner as type_name. + */ + extendee: string; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + * TODO(kenton): Base-64 encode? + */ + defaultValue: string; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneofIndex: number; + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + jsonName: string; + options: + | FieldOptions + | undefined; + /** + * If true, this is a proto3 "optional". When a proto3 field is optional, it + * tracks presence regardless of field type. + * + * When proto3_optional is true, this field must be belong to a oneof to + * signal to old proto3 clients that presence is tracked for this field. This + * oneof is known as a "synthetic" oneof, and this field must be its sole + * member (each proto3 optional field gets its own synthetic oneof). Synthetic + * oneofs exist in the descriptor only, and do not generate any API. Synthetic + * oneofs must be ordered after all "real" oneofs. + * + * For message fields, proto3_optional doesn't create any semantic change, + * since non-repeated message fields always track presence. However it still + * indicates the semantic detail of whether the user wrote "optional" or not. + * This can be useful for round-tripping the .proto file. For consistency we + * give message fields a synthetic oneof also, even though it is not required + * to track presence. 
This is especially important because the parser can't + * tell if a field is a message or an enum, so it must always create a + * synthetic oneof. + * + * Proto2 optional fields do not set this flag, because they already indicate + * optional with `LABEL_OPTIONAL`. + */ + proto3Optional: boolean; +} + +export enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + TYPE_GROUP = 10, + /** TYPE_MESSAGE - Length-delimited aggregate. */ + TYPE_MESSAGE = 11, + /** TYPE_BYTES - New in version 2. */ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + /** TYPE_SINT64 - Uses ZigZag encoding. 
*/ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + return FieldDescriptorProto_Type.TYPE_SINT64; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Type.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case 
FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + case FieldDescriptorProto_Type.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Label.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string { + switch (object) { + case 
FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + case FieldDescriptorProto_Label.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** Describes a oneof. */ +export interface OneofDescriptorProto { + name: string; + options: OneofOptions | undefined; +} + +/** Describes an enum type. */ +export interface EnumDescriptorProto { + name: string; + value: EnumValueDescriptorProto[]; + options: + | EnumOptions + | undefined; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reservedRange: EnumDescriptorProto_EnumReservedRange[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reservedName: string[]; +} + +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start: number; + /** Inclusive. */ + end: number; +} + +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProto { + name: string; + number: number; + options: EnumValueOptions | undefined; +} + +/** Describes a service. */ +export interface ServiceDescriptorProto { + name: string; + method: MethodDescriptorProto[]; + options: ServiceOptions | undefined; +} + +/** Describes a method of a service. */ +export interface MethodDescriptorProto { + name: string; + /** + * Input and output type names. 
These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + inputType: string; + outputType: string; + options: + | MethodOptions + | undefined; + /** Identifies if client streams multiple client messages */ + clientStreaming: boolean; + /** Identifies if server streams multiple server messages */ + serverStreaming: boolean; +} + +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage: string; + /** + * Controls the name of the wrapper Java class generated for the .proto file. + * That class will always contain the .proto file's getDescriptor() method as + * well as any top-level extensions defined in the .proto file. + * If java_multiple_files is disabled, then all the other classes from the + * .proto file will be nested inside the single wrapper outer class. + */ + javaOuterClassname: string; + /** + * If enabled, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the wrapper class + * named by java_outer_classname. However, the wrapper class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + javaMultipleFiles: boolean; + /** + * This option does nothing. + * + * @deprecated + */ + javaGenerateEqualsAndHash: boolean; + /** + * If set true, then the Java2 code generator will generate code that + * throws an exception whenever an attempt is made to assign a non-UTF-8 + * byte sequence to a string field. + * Message reflection will do the same. + * However, an extension field still accepts non-UTF-8 byte sequences. 
+ * This option has no effect on when used with the lite runtime. + */ + javaStringCheckUtf8: boolean; + optimizeFor: FileOptions_OptimizeMode; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + goPackage: string; + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + ccGenericServices: boolean; + javaGenericServices: boolean; + pyGenericServices: boolean; + phpGenericServices: boolean; + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated: boolean; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + ccEnableArenas: boolean; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. + */ + objcClassPrefix: string; + /** Namespace for generated classes; defaults to the package. 
*/ + csharpNamespace: string; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swiftPrefix: string; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + phpClassPrefix: string; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. When this option is empty, the package name will be used for + * determining the namespace. + */ + phpNamespace: string; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + phpMetadataNamespace: string; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + rubyPackage: string; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Generated classes can be optimized for speed or code size. */ +export enum FileOptions_OptimizeMode { + /** SPEED - Generate complete code for parsing, serialization, */ + SPEED = 1, + /** CODE_SIZE - etc. */ + CODE_SIZE = 2, + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. 
*/ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1, +} + +export function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + case -1: + case "UNRECOGNIZED": + default: + return FileOptions_OptimizeMode.UNRECOGNIZED; + } +} + +export function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + case FileOptions_OptimizeMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + messageSetWireFormat: boolean; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". 
+ */ + noStandardDescriptorAccessor: boolean; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated: boolean; + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + mapEntry: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface FieldOptions { + /** + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is not yet implemented in the open source + * release -- sorry, we'll try to include it in a future version! + */ + ctype: FieldOptions_CType; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. 
+ */ + packed: boolean; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. + * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype: FieldOptions_JSType; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * Note that implementations may choose not to check required fields within + * a lazy sub-message. That is, calling IsInitialized() on the outer message + * may return true even if the inner message has missing required fields. 
+ * This is necessary because otherwise the inner message would have to be + * parsed in order to perform the check, defeating the purpose of lazy + * parsing. An implementation which chooses not to check required fields + * must be consistent about it. That is, for any particular sub-message, the + * implementation must either *always* check its required fields, or *never* + * check its required fields, regardless of whether or not the message has + * been parsed. + */ + lazy: boolean; + /** + * Is this field deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated: boolean; + /** For Google-internal migration only. Do not use. */ + weak: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export enum FieldOptions_CType { + /** STRING - Default mode. */ + STRING = 0, + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_CType.UNRECOGNIZED; + } +} + +export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + case FieldOptions_CType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. */ + JS_NORMAL = 0, + /** JS_STRING - Use JavaScript strings. 
*/ + JS_STRING = 1, + /** JS_NUMBER - Use JavaScript numbers. */ + JS_NUMBER = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_JSType.UNRECOGNIZED; + } +} + +export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + case FieldOptions_JSType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface OneofOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias: boolean; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumValueOptions { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. 
*/ + uninterpretedOption: UninterpretedOption[]; +} + +export interface ServiceOptions { + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated: boolean; + idempotencyLevel: MethodOptions_IdempotencyLevel; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
+ */ +export enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1, +} + +export function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + case -1: + case "UNRECOGNIZED": + default: + return MethodOptions_IdempotencyLevel.UNRECOGNIZED; + } +} + +export function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + case MethodOptions_IdempotencyLevel.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. 
+ */ + identifierValue: string; + positiveIntValue: number; + negativeIntValue: number; + doubleValue: number; + stringValue: Uint8Array; + aggregateValue: string; +} + +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePart { + namePart: string; + isExtension: boolean; +} + +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. 
This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[]; +} + +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition. For + * example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). 
+ */ + path: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. + * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to qux. + * // + * // Another line attached to qux. + * optional double qux = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to qux or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. * / + * /* Block comment attached to + * * grault. 
* / + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leadingComments: string; + trailingComments: string; + leadingDetachedComments: string[]; +} + +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. + */ + annotation: GeneratedCodeInfo_Annotation[]; +} + +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[]; + /** Identifies the filesystem path to the original source .proto. */ + sourceFile: string; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin: number; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified offset. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end: number; +} + +function createBaseFileDescriptorSet(): FileDescriptorSet { + return { file: [] }; +} + +export const FileDescriptorSet = { + encode(message: FileDescriptorSet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.file) { + FileDescriptorProto.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorSet { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.file.push(FileDescriptorProto.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorSet { + return { file: Array.isArray(object?.file) ? object.file.map((e: any) => FileDescriptorProto.fromJSON(e)) : [] }; + }, + + toJSON(message: FileDescriptorSet): unknown { + const obj: any = {}; + if (message.file) { + obj.file = message.file.map((e) => e ? FileDescriptorProto.toJSON(e) : undefined); + } else { + obj.file = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorSet { + const message = createBaseFileDescriptorSet(); + message.file = object.file?.map((e) => FileDescriptorProto.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFileDescriptorProto(): FileDescriptorProto { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + }; +} + +export const FileDescriptorProto = { + encode(message: FileDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.package !== "") { + writer.uint32(18).string(message.package); + } + for (const v of message.dependency) { + writer.uint32(26).string(v!); + } + writer.uint32(82).fork(); + for (const v of message.publicDependency) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(90).fork(); + for (const v of message.weakDependency) { + writer.int32(v); + } + writer.ldelim(); + for (const v of message.messageType) { + DescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of 
message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const v of message.service) { + ServiceDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(58).fork()).ldelim(); + } + if (message.options !== undefined) { + FileOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.sourceCodeInfo !== undefined) { + SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(74).fork()).ldelim(); + } + if (message.syntax !== "") { + writer.uint32(98).string(message.syntax); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFileDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.package = reader.string(); + break; + case 3: + message.dependency.push(reader.string()); + break; + case 10: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.publicDependency.push(reader.int32()); + } + } else { + message.publicDependency.push(reader.int32()); + } + break; + case 11: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.weakDependency.push(reader.int32()); + } + } else { + message.weakDependency.push(reader.int32()); + } + break; + case 4: + message.messageType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.service.push(ServiceDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + 
message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 8: + message.options = FileOptions.decode(reader, reader.uint32()); + break; + case 9: + message.sourceCodeInfo = SourceCodeInfo.decode(reader, reader.uint32()); + break; + case 12: + message.syntax = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + package: isSet(object.package) ? String(object.package) : "", + dependency: Array.isArray(object?.dependency) ? object.dependency.map((e: any) => String(e)) : [], + publicDependency: Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e: any) => Number(e)) + : [], + weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e: any) => Number(e)) : [], + messageType: Array.isArray(object?.messageType) + ? object.messageType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + service: Array.isArray(object?.service) ? object.service.map((e: any) => ServiceDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) ? SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, + syntax: isSet(object.syntax) ? 
String(object.syntax) : "", + }; + }, + + toJSON(message: FileDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.package !== undefined && (obj.package = message.package); + if (message.dependency) { + obj.dependency = message.dependency.map((e) => e); + } else { + obj.dependency = []; + } + if (message.publicDependency) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } else { + obj.publicDependency = []; + } + if (message.weakDependency) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } else { + obj.weakDependency = []; + } + if (message.messageType) { + obj.messageType = message.messageType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.messageType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.service) { + obj.service = message.service.map((e) => e ? ServiceDescriptorProto.toJSON(e) : undefined); + } else { + obj.service = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + message.options !== undefined && (obj.options = message.options ? FileOptions.toJSON(message.options) : undefined); + message.sourceCodeInfo !== undefined + && (obj.sourceCodeInfo = message.sourceCodeInfo ? SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined); + message.syntax !== undefined && (obj.syntax = message.syntax); + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorProto { + const message = createBaseFileDescriptorProto(); + message.name = object.name ?? ""; + message.package = object.package ?? 
""; + message.dependency = object.dependency?.map((e) => e) || []; + message.publicDependency = object.publicDependency?.map((e) => e) || []; + message.weakDependency = object.weakDependency?.map((e) => e) || []; + message.messageType = object.messageType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.service = object.service?.map((e) => ServiceDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? FileOptions.fromPartial(object.options) + : undefined; + message.sourceCodeInfo = (object.sourceCodeInfo !== undefined && object.sourceCodeInfo !== null) + ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) + : undefined; + message.syntax = object.syntax ?? ""; + return message; + }, +}; + +function createBaseDescriptorProto(): DescriptorProto { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + }; +} + +export const DescriptorProto = { + encode(message: DescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.field) { + FieldDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.nestedType) { + DescriptorProto.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.extensionRange) { + DescriptorProto_ExtensionRange.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const 
v of message.oneofDecl) { + OneofDescriptorProto.encode(v!, writer.uint32(66).fork()).ldelim(); + } + if (message.options !== undefined) { + MessageOptions.encode(message.options, writer.uint32(58).fork()).ldelim(); + } + for (const v of message.reservedRange) { + DescriptorProto_ReservedRange.encode(v!, writer.uint32(74).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(82).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.field.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.nestedType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 4: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.extensionRange.push(DescriptorProto_ExtensionRange.decode(reader, reader.uint32())); + break; + case 8: + message.oneofDecl.push(OneofDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + message.options = MessageOptions.decode(reader, reader.uint32()); + break; + case 9: + message.reservedRange.push(DescriptorProto_ReservedRange.decode(reader, reader.uint32())); + break; + case 10: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + field: Array.isArray(object?.field) ? 
object.field.map((e: any) => FieldDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: Array.isArray(object?.nestedType) + ? object.nestedType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + extensionRange: Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e: any) => DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e: any) => OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? MessageOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: DescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.field) { + obj.field = message.field.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.field = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + if (message.nestedType) { + obj.nestedType = message.nestedType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.nestedType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.extensionRange) { + obj.extensionRange = message.extensionRange.map((e) => e ? 
DescriptorProto_ExtensionRange.toJSON(e) : undefined); + } else { + obj.extensionRange = []; + } + if (message.oneofDecl) { + obj.oneofDecl = message.oneofDecl.map((e) => e ? OneofDescriptorProto.toJSON(e) : undefined); + } else { + obj.oneofDecl = []; + } + message.options !== undefined + && (obj.options = message.options ? MessageOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => e ? DescriptorProto_ReservedRange.toJSON(e) : undefined); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): DescriptorProto { + const message = createBaseDescriptorProto(); + message.name = object.name ?? ""; + message.field = object.field?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.nestedType = object.nestedType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.extensionRange = object.extensionRange?.map((e) => DescriptorProto_ExtensionRange.fromPartial(e)) || []; + message.oneofDecl = object.oneofDecl?.map((e) => OneofDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
MessageOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => DescriptorProto_ReservedRange.fromPartial(e)) || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseDescriptorProto_ExtensionRange(): DescriptorProto_ExtensionRange { + return { start: 0, end: 0, options: undefined }; +} + +export const DescriptorProto_ExtensionRange = { + encode(message: DescriptorProto_ExtensionRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + if (message.options !== undefined) { + ExtensionRangeOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ExtensionRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ExtensionRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + case 3: + message.options = ExtensionRangeOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ExtensionRange { + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + options: isSet(object.options) ? 
ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: DescriptorProto_ExtensionRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + message.options !== undefined + && (obj.options = message.options ? ExtensionRangeOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ExtensionRange { + const message = createBaseDescriptorProto_ExtensionRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? ExtensionRangeOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseDescriptorProto_ReservedRange(): DescriptorProto_ReservedRange { + return { start: 0, end: 0 }; +} + +export const DescriptorProto_ReservedRange = { + encode(message: DescriptorProto_ReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? 
Number(object.end) : 0 }; + }, + + toJSON(message: DescriptorProto_ReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ReservedRange { + const message = createBaseDescriptorProto_ReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + return message; + }, +}; + +function createBaseExtensionRangeOptions(): ExtensionRangeOptions { + return { uninterpretedOption: [] }; +} + +export const ExtensionRangeOptions = { + encode(message: ExtensionRangeOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ExtensionRangeOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ExtensionRangeOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} + +export const FieldDescriptorProto = { + encode(message: FieldDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(24).int32(message.number); + } + if (message.label !== 1) { + writer.uint32(32).int32(message.label); + } + if (message.type !== 1) { + writer.uint32(40).int32(message.type); + } + if (message.typeName !== "") { + writer.uint32(50).string(message.typeName); + } + if (message.extendee !== "") { + writer.uint32(18).string(message.extendee); + } + if (message.defaultValue !== "") { + writer.uint32(58).string(message.defaultValue); + } + if (message.oneofIndex !== 0) { + writer.uint32(72).int32(message.oneofIndex); + } + if (message.jsonName !== "") { + writer.uint32(82).string(message.jsonName); + } + if (message.options !== undefined) { + FieldOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.proto3Optional === true) { + writer.uint32(136).bool(message.proto3Optional); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 3: + message.number = reader.int32(); + break; + case 4: + message.label = reader.int32() as any; + break; + case 5: + message.type = reader.int32() as any; + break; + case 6: + message.typeName = reader.string(); + break; + case 2: + message.extendee = reader.string(); + break; + case 7: + message.defaultValue = reader.string(); + break; + case 9: + message.oneofIndex = reader.int32(); + break; + case 10: + message.jsonName = reader.string(); + break; + case 8: + message.options = FieldOptions.decode(reader, reader.uint32()); + break; + case 17: + message.proto3Optional = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) ? String(object.typeName) : "", + extendee: isSet(object.extendee) ? String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "", + oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? String(object.jsonName) : "", + options: isSet(object.options) ? FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) ? 
Boolean(object.proto3Optional) : false, + }; + }, + + toJSON(message: FieldDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label)); + message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type)); + message.typeName !== undefined && (obj.typeName = message.typeName); + message.extendee !== undefined && (obj.extendee = message.extendee); + message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue); + message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex)); + message.jsonName !== undefined && (obj.jsonName = message.jsonName); + message.options !== undefined && (obj.options = message.options ? FieldOptions.toJSON(message.options) : undefined); + message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional); + return obj; + }, + + fromPartial, I>>(object: I): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.label = object.label ?? 1; + message.type = object.type ?? 1; + message.typeName = object.typeName ?? ""; + message.extendee = object.extendee ?? ""; + message.defaultValue = object.defaultValue ?? ""; + message.oneofIndex = object.oneofIndex ?? 0; + message.jsonName = object.jsonName ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? FieldOptions.fromPartial(object.options) + : undefined; + message.proto3Optional = object.proto3Optional ?? 
false; + return message; + }, +}; + +function createBaseOneofDescriptorProto(): OneofDescriptorProto { + return { name: "", options: undefined }; +} + +export const OneofDescriptorProto = { + encode(message: OneofDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.options !== undefined) { + OneofOptions.encode(message.options, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.options = OneofOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + options: isSet(object.options) ? OneofOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: OneofDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.options !== undefined && (obj.options = message.options ? OneofOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto(); + message.name = object.name ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? 
OneofOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseEnumDescriptorProto(): EnumDescriptorProto { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] }; +} + +export const EnumDescriptorProto = { + encode(message: EnumDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.value) { + EnumValueDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + EnumOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.reservedRange) { + EnumDescriptorProto_EnumReservedRange.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(42).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.value.push(EnumValueDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = EnumOptions.decode(reader, reader.uint32()); + break; + case 4: + message.reservedRange.push(EnumDescriptorProto_EnumReservedRange.decode(reader, reader.uint32())); + break; + case 5: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + value: Array.isArray(object?.value) ? 
object.value.map((e: any) => EnumValueDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? EnumOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: EnumDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.value) { + obj.value = message.value.map((e) => e ? EnumValueDescriptorProto.toJSON(e) : undefined); + } else { + obj.value = []; + } + message.options !== undefined && (obj.options = message.options ? EnumOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => + e ? EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined + ); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto(); + message.name = object.name ?? ""; + message.value = object.value?.map((e) => EnumValueDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
EnumOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => EnumDescriptorProto_EnumReservedRange.fromPartial(e)) + || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseEnumDescriptorProto_EnumReservedRange(): EnumDescriptorProto_EnumReservedRange { + return { start: 0, end: 0 }; +} + +export const EnumDescriptorProto_EnumReservedRange = { + encode(message: EnumDescriptorProto_EnumReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto_EnumReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto_EnumReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + + toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +function createBaseEnumValueDescriptorProto(): EnumValueDescriptorProto { + return { name: "", number: 0, options: undefined }; +} + +export const EnumValueDescriptorProto = { + encode(message: EnumValueDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(16).int32(message.number); + } + if (message.options !== undefined) { + EnumValueOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.number = reader.int32(); + break; + case 3: + message.options = EnumValueOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + options: isSet(object.options) ? EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: EnumValueDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.options !== undefined + && (obj.options = message.options ? 
EnumValueOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? EnumValueOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseServiceDescriptorProto(): ServiceDescriptorProto { + return { name: "", method: [], options: undefined }; +} + +export const ServiceDescriptorProto = { + encode(message: ServiceDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.method) { + MethodDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + ServiceOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.method.push(MethodDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = ServiceOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + method: Array.isArray(object?.method) ? object.method.map((e: any) => MethodDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? 
ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: ServiceDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.method) { + obj.method = message.method.map((e) => e ? MethodDescriptorProto.toJSON(e) : undefined); + } else { + obj.method = []; + } + message.options !== undefined + && (obj.options = message.options ? ServiceOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto(); + message.name = object.name ?? ""; + message.method = object.method?.map((e) => MethodDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? ServiceOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseMethodDescriptorProto(): MethodDescriptorProto { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} + +export const MethodDescriptorProto = { + encode(message: MethodDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.inputType !== "") { + writer.uint32(18).string(message.inputType); + } + if (message.outputType !== "") { + writer.uint32(26).string(message.outputType); + } + if (message.options !== undefined) { + MethodOptions.encode(message.options, writer.uint32(34).fork()).ldelim(); + } + if (message.clientStreaming === true) { + writer.uint32(40).bool(message.clientStreaming); + } + if (message.serverStreaming === true) { + writer.uint32(48).bool(message.serverStreaming); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodDescriptorProto { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.inputType = reader.string(); + break; + case 3: + message.outputType = reader.string(); + break; + case 4: + message.options = MethodOptions.decode(reader, reader.uint32()); + break; + case 5: + message.clientStreaming = reader.bool(); + break; + case 6: + message.serverStreaming = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + inputType: isSet(object.inputType) ? String(object.inputType) : "", + outputType: isSet(object.outputType) ? String(object.outputType) : "", + options: isSet(object.options) ? MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false, + serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false, + }; + }, + + toJSON(message: MethodDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.inputType !== undefined && (obj.inputType = message.inputType); + message.outputType !== undefined && (obj.outputType = message.outputType); + message.options !== undefined + && (obj.options = message.options ? MethodOptions.toJSON(message.options) : undefined); + message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming); + message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming); + return obj; + }, + + fromPartial, I>>(object: I): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto(); + message.name = object.name ?? 
""; + message.inputType = object.inputType ?? ""; + message.outputType = object.outputType ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? MethodOptions.fromPartial(object.options) + : undefined; + message.clientStreaming = object.clientStreaming ?? false; + message.serverStreaming = object.serverStreaming ?? false; + return message; + }, +}; + +function createBaseFileOptions(): FileOptions { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + uninterpretedOption: [], + }; +} + +export const FileOptions = { + encode(message: FileOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.javaPackage !== "") { + writer.uint32(10).string(message.javaPackage); + } + if (message.javaOuterClassname !== "") { + writer.uint32(66).string(message.javaOuterClassname); + } + if (message.javaMultipleFiles === true) { + writer.uint32(80).bool(message.javaMultipleFiles); + } + if (message.javaGenerateEqualsAndHash === true) { + writer.uint32(160).bool(message.javaGenerateEqualsAndHash); + } + if (message.javaStringCheckUtf8 === true) { + writer.uint32(216).bool(message.javaStringCheckUtf8); + } + if (message.optimizeFor !== 1) { + writer.uint32(72).int32(message.optimizeFor); + } + if (message.goPackage !== "") { + writer.uint32(90).string(message.goPackage); + } + if (message.ccGenericServices === true) { + writer.uint32(128).bool(message.ccGenericServices); + } + if (message.javaGenericServices === true) { + writer.uint32(136).bool(message.javaGenericServices); + } + if 
(message.pyGenericServices === true) { + writer.uint32(144).bool(message.pyGenericServices); + } + if (message.phpGenericServices === true) { + writer.uint32(336).bool(message.phpGenericServices); + } + if (message.deprecated === true) { + writer.uint32(184).bool(message.deprecated); + } + if (message.ccEnableArenas === true) { + writer.uint32(248).bool(message.ccEnableArenas); + } + if (message.objcClassPrefix !== "") { + writer.uint32(290).string(message.objcClassPrefix); + } + if (message.csharpNamespace !== "") { + writer.uint32(298).string(message.csharpNamespace); + } + if (message.swiftPrefix !== "") { + writer.uint32(314).string(message.swiftPrefix); + } + if (message.phpClassPrefix !== "") { + writer.uint32(322).string(message.phpClassPrefix); + } + if (message.phpNamespace !== "") { + writer.uint32(330).string(message.phpNamespace); + } + if (message.phpMetadataNamespace !== "") { + writer.uint32(354).string(message.phpMetadataNamespace); + } + if (message.rubyPackage !== "") { + writer.uint32(362).string(message.rubyPackage); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.javaPackage = reader.string(); + break; + case 8: + message.javaOuterClassname = reader.string(); + break; + case 10: + message.javaMultipleFiles = reader.bool(); + break; + case 20: + message.javaGenerateEqualsAndHash = reader.bool(); + break; + case 27: + message.javaStringCheckUtf8 = reader.bool(); + break; + case 9: + message.optimizeFor = reader.int32() as any; + break; + case 11: + message.goPackage = reader.string(); + break; + case 16: + message.ccGenericServices = reader.bool(); + break; + case 17: + message.javaGenericServices = reader.bool(); + break; + case 18: + message.pyGenericServices = reader.bool(); + break; + case 42: + message.phpGenericServices = reader.bool(); + break; + case 23: + message.deprecated = reader.bool(); + break; + case 31: + message.ccEnableArenas = reader.bool(); + break; + case 36: + message.objcClassPrefix = reader.string(); + break; + case 37: + message.csharpNamespace = reader.string(); + break; + case 39: + message.swiftPrefix = reader.string(); + break; + case 40: + message.phpClassPrefix = reader.string(); + break; + case 41: + message.phpNamespace = reader.string(); + break; + case 44: + message.phpMetadataNamespace = reader.string(); + break; + case 45: + message.rubyPackage = reader.string(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileOptions { + return { + javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "", + javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) ? 
Boolean(object.javaMultipleFiles) : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false, + optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, + goPackage: isSet(object.goPackage) ? String(object.goPackage) : "", + ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false, + javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false, + pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false, + phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false, + objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "", + csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "", + swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "", + phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "", + phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "", + rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "", + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FileOptions): unknown { + const obj: any = {}; + message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage); + message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname); + message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles); + message.javaGenerateEqualsAndHash !== undefined + && (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash); + message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8); + message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor)); + message.goPackage !== undefined && (obj.goPackage = message.goPackage); + message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices); + message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices); + message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices); + message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas); + message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix); + message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace); + message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix); + message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix); + message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace); + message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace); + message.rubyPackage !== undefined 
&& (obj.rubyPackage = message.rubyPackage); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileOptions { + const message = createBaseFileOptions(); + message.javaPackage = object.javaPackage ?? ""; + message.javaOuterClassname = object.javaOuterClassname ?? ""; + message.javaMultipleFiles = object.javaMultipleFiles ?? false; + message.javaGenerateEqualsAndHash = object.javaGenerateEqualsAndHash ?? false; + message.javaStringCheckUtf8 = object.javaStringCheckUtf8 ?? false; + message.optimizeFor = object.optimizeFor ?? 1; + message.goPackage = object.goPackage ?? ""; + message.ccGenericServices = object.ccGenericServices ?? false; + message.javaGenericServices = object.javaGenericServices ?? false; + message.pyGenericServices = object.pyGenericServices ?? false; + message.phpGenericServices = object.phpGenericServices ?? false; + message.deprecated = object.deprecated ?? false; + message.ccEnableArenas = object.ccEnableArenas ?? false; + message.objcClassPrefix = object.objcClassPrefix ?? ""; + message.csharpNamespace = object.csharpNamespace ?? ""; + message.swiftPrefix = object.swiftPrefix ?? ""; + message.phpClassPrefix = object.phpClassPrefix ?? ""; + message.phpNamespace = object.phpNamespace ?? ""; + message.phpMetadataNamespace = object.phpMetadataNamespace ?? ""; + message.rubyPackage = object.rubyPackage ?? 
""; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMessageOptions(): MessageOptions { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + uninterpretedOption: [], + }; +} + +export const MessageOptions = { + encode(message: MessageOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.messageSetWireFormat === true) { + writer.uint32(8).bool(message.messageSetWireFormat); + } + if (message.noStandardDescriptorAccessor === true) { + writer.uint32(16).bool(message.noStandardDescriptorAccessor); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.mapEntry === true) { + writer.uint32(56).bool(message.mapEntry); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MessageOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMessageOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.messageSetWireFormat = reader.bool(); + break; + case 2: + message.noStandardDescriptorAccessor = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 7: + message.mapEntry = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MessageOptions { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) ? 
Boolean(object.messageSetWireFormat) : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MessageOptions): unknown { + const obj: any = {}; + message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat); + message.noStandardDescriptorAccessor !== undefined + && (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MessageOptions { + const message = createBaseMessageOptions(); + message.messageSetWireFormat = object.messageSetWireFormat ?? false; + message.noStandardDescriptorAccessor = object.noStandardDescriptorAccessor ?? false; + message.deprecated = object.deprecated ?? false; + message.mapEntry = object.mapEntry ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldOptions(): FieldOptions { + return { ctype: 0, packed: false, jstype: 0, lazy: false, deprecated: false, weak: false, uninterpretedOption: [] }; +} + +export const FieldOptions = { + encode(message: FieldOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ctype !== 0) { + writer.uint32(8).int32(message.ctype); + } + if (message.packed === true) { + writer.uint32(16).bool(message.packed); + } + if (message.jstype !== 0) { + writer.uint32(48).int32(message.jstype); + } + if (message.lazy === true) { + writer.uint32(40).bool(message.lazy); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.weak === true) { + writer.uint32(80).bool(message.weak); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.ctype = reader.int32() as any; + break; + case 2: + message.packed = reader.bool(); + break; + case 6: + message.jstype = reader.int32() as any; + break; + case 5: + message.lazy = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 10: + message.weak = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldOptions { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? Boolean(object.lazy) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? Boolean(object.weak) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FieldOptions): unknown { + const obj: any = {}; + message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype)); + message.packed !== undefined && (obj.packed = message.packed); + message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype)); + message.lazy !== undefined && (obj.lazy = message.lazy); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.weak !== undefined && (obj.weak = message.weak); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FieldOptions { + const message = createBaseFieldOptions(); + message.ctype = object.ctype ?? 0; + message.packed = object.packed ?? false; + message.jstype = object.jstype ?? 0; + message.lazy = object.lazy ?? false; + message.deprecated = object.deprecated ?? false; + message.weak = object.weak ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseOneofOptions(): OneofOptions { + return { uninterpretedOption: [] }; +} + +export const OneofOptions = { + encode(message: OneofOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: OneofOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): OneofOptions { + const message = createBaseOneofOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumOptions(): EnumOptions { + return { allowAlias: false, deprecated: false, uninterpretedOption: [] }; +} + +export const EnumOptions = { + encode(message: EnumOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.allowAlias === true) { + writer.uint32(16).bool(message.allowAlias); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.allowAlias = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumOptions { + return { + allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumOptions): unknown { + const obj: any = {}; + message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumOptions { + const message = createBaseEnumOptions(); + message.allowAlias = object.allowAlias ?? false; + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumValueOptions(): EnumValueOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const EnumValueOptions = { + encode(message: EnumValueOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(8).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumValueOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumValueOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumValueOptions { + const message = createBaseEnumValueOptions(); + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseServiceOptions(): ServiceOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const ServiceOptions = { + encode(message: ServiceOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceOptions { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ServiceOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ServiceOptions { + const message = createBaseServiceOptions(); + message.deprecated = object.deprecated ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMethodOptions(): MethodOptions { + return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] }; +} + +export const MethodOptions = { + encode(message: MethodOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + if (message.idempotencyLevel !== 0) { + writer.uint32(272).int32(message.idempotencyLevel); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 34: + message.idempotencyLevel = reader.int32() as any; + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : 0, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MethodOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.idempotencyLevel !== undefined + && (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel)); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MethodOptions { + const message = createBaseMethodOptions(); + message.deprecated = object.deprecated ?? false; + message.idempotencyLevel = object.idempotencyLevel ?? 0; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseUninterpretedOption(): UninterpretedOption { + return { + name: [], + identifierValue: "", + positiveIntValue: 0, + negativeIntValue: 0, + doubleValue: 0, + stringValue: new Uint8Array(), + aggregateValue: "", + }; +} + +export const UninterpretedOption = { + encode(message: UninterpretedOption, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.name) { + UninterpretedOption_NamePart.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.identifierValue !== "") { + writer.uint32(26).string(message.identifierValue); + } + if (message.positiveIntValue !== 0) { + writer.uint32(32).uint64(message.positiveIntValue); + } + if (message.negativeIntValue !== 0) { + writer.uint32(40).int64(message.negativeIntValue); + } + if (message.doubleValue !== 0) { + writer.uint32(49).double(message.doubleValue); + } + if (message.stringValue.length !== 0) { + writer.uint32(58).bytes(message.stringValue); + } + if (message.aggregateValue !== "") { + writer.uint32(66).string(message.aggregateValue); + } + 
return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.name.push(UninterpretedOption_NamePart.decode(reader, reader.uint32())); + break; + case 3: + message.identifierValue = reader.string(); + break; + case 4: + message.positiveIntValue = longToNumber(reader.uint64() as Long); + break; + case 5: + message.negativeIntValue = longToNumber(reader.int64() as Long); + break; + case 6: + message.doubleValue = reader.double(); + break; + case 7: + message.stringValue = reader.bytes(); + break; + case 8: + message.aggregateValue = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption { + return { + name: Array.isArray(object?.name) ? object.name.map((e: any) => UninterpretedOption_NamePart.fromJSON(e)) : [], + identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "", + positiveIntValue: isSet(object.positiveIntValue) ? Number(object.positiveIntValue) : 0, + negativeIntValue: isSet(object.negativeIntValue) ? Number(object.negativeIntValue) : 0, + doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) ? bytesFromBase64(object.stringValue) : new Uint8Array(), + aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "", + }; + }, + + toJSON(message: UninterpretedOption): unknown { + const obj: any = {}; + if (message.name) { + obj.name = message.name.map((e) => e ? 
UninterpretedOption_NamePart.toJSON(e) : undefined); + } else { + obj.name = []; + } + message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue); + message.positiveIntValue !== undefined && (obj.positiveIntValue = Math.round(message.positiveIntValue)); + message.negativeIntValue !== undefined && (obj.negativeIntValue = Math.round(message.negativeIntValue)); + message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue); + message.stringValue !== undefined + && (obj.stringValue = base64FromBytes( + message.stringValue !== undefined ? message.stringValue : new Uint8Array(), + )); + message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption { + const message = createBaseUninterpretedOption(); + message.name = object.name?.map((e) => UninterpretedOption_NamePart.fromPartial(e)) || []; + message.identifierValue = object.identifierValue ?? ""; + message.positiveIntValue = object.positiveIntValue ?? 0; + message.negativeIntValue = object.negativeIntValue ?? 0; + message.doubleValue = object.doubleValue ?? 0; + message.stringValue = object.stringValue ?? new Uint8Array(); + message.aggregateValue = object.aggregateValue ?? ""; + return message; + }, +}; + +function createBaseUninterpretedOption_NamePart(): UninterpretedOption_NamePart { + return { namePart: "", isExtension: false }; +} + +export const UninterpretedOption_NamePart = { + encode(message: UninterpretedOption_NamePart, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.namePart !== "") { + writer.uint32(10).string(message.namePart); + } + if (message.isExtension === true) { + writer.uint32(16).bool(message.isExtension); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption_NamePart { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseUninterpretedOption_NamePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.namePart = reader.string(); + break; + case 2: + message.isExtension = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption_NamePart { + return { + namePart: isSet(object.namePart) ? String(object.namePart) : "", + isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false, + }; + }, + + toJSON(message: UninterpretedOption_NamePart): unknown { + const obj: any = {}; + message.namePart !== undefined && (obj.namePart = message.namePart); + message.isExtension !== undefined && (obj.isExtension = message.isExtension); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart(); + message.namePart = object.namePart ?? ""; + message.isExtension = object.isExtension ?? false; + return message; + }, +}; + +function createBaseSourceCodeInfo(): SourceCodeInfo { + return { location: [] }; +} + +export const SourceCodeInfo = { + encode(message: SourceCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.location) { + SourceCodeInfo_Location.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.location.push(SourceCodeInfo_Location.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo { + return { + location: Array.isArray(object?.location) + ? object.location.map((e: any) => SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo): unknown { + const obj: any = {}; + if (message.location) { + obj.location = message.location.map((e) => e ? SourceCodeInfo_Location.toJSON(e) : undefined); + } else { + obj.location = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo { + const message = createBaseSourceCodeInfo(); + message.location = object.location?.map((e) => SourceCodeInfo_Location.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSourceCodeInfo_Location(): SourceCodeInfo_Location { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} + +export const SourceCodeInfo_Location = { + encode(message: SourceCodeInfo_Location, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(18).fork(); + for (const v of message.span) { + writer.int32(v); + } + writer.ldelim(); + if (message.leadingComments !== "") { + writer.uint32(26).string(message.leadingComments); + } + if (message.trailingComments !== "") { + writer.uint32(34).string(message.trailingComments); + } + for (const v of message.leadingDetachedComments) { + writer.uint32(50).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo_Location { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo_Location(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.span.push(reader.int32()); + } + } else { + message.span.push(reader.int32()); + } + break; + case 3: + message.leadingComments = reader.string(); + break; + case 4: + message.trailingComments = reader.string(); + break; + case 6: + message.leadingDetachedComments.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo_Location { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + span: Array.isArray(object?.span) ? object.span.map((e: any) => Number(e)) : [], + leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "", + trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "", + leadingDetachedComments: Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e: any) => String(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo_Location): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + if (message.span) { + obj.span = message.span.map((e) => Math.round(e)); + } else { + obj.span = []; + } + message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments); + message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments); + if (message.leadingDetachedComments) { + obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e); + } else { + obj.leadingDetachedComments = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location(); + message.path = object.path?.map((e) => e) || []; + message.span = object.span?.map((e) => e) || []; + message.leadingComments = object.leadingComments ?? ""; + message.trailingComments = object.trailingComments ?? ""; + message.leadingDetachedComments = object.leadingDetachedComments?.map((e) => e) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo(): GeneratedCodeInfo { + return { annotation: [] }; +} + +export const GeneratedCodeInfo = { + encode(message: GeneratedCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.annotation) { + GeneratedCodeInfo_Annotation.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.annotation.push(GeneratedCodeInfo_Annotation.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo { + return { + annotation: Array.isArray(object?.annotation) + ? object.annotation.map((e: any) => GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GeneratedCodeInfo): unknown { + const obj: any = {}; + if (message.annotation) { + obj.annotation = message.annotation.map((e) => e ? GeneratedCodeInfo_Annotation.toJSON(e) : undefined); + } else { + obj.annotation = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo(); + message.annotation = object.annotation?.map((e) => GeneratedCodeInfo_Annotation.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo_Annotation(): GeneratedCodeInfo_Annotation { + return { path: [], sourceFile: "", begin: 0, end: 0 }; +} + +export const GeneratedCodeInfo_Annotation = { + encode(message: GeneratedCodeInfo_Annotation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + if (message.sourceFile !== "") { + writer.uint32(18).string(message.sourceFile); + } + if (message.begin !== 0) { + writer.uint32(24).int32(message.begin); + } + if (message.end !== 0) { + writer.uint32(32).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo_Annotation { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo_Annotation(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + message.sourceFile = reader.string(); + break; + case 3: + message.begin = reader.int32(); + break; + case 4: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo_Annotation { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "", + begin: isSet(object.begin) ? Number(object.begin) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + }; + }, + + toJSON(message: GeneratedCodeInfo_Annotation): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile); + message.begin !== undefined && (obj.begin = Math.round(message.begin)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation(); + message.path = object.path?.map((e) => e) || []; + message.sourceFile = object.sourceFile ?? ""; + message.begin = object.begin ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.tx.v1beta1/types/google/protobuf/duration.ts b/ts-client/cosmos.tx.v1beta1/types/google/protobuf/duration.ts new file mode 100644 index 000000000..70ce816b7 --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/google/protobuf/duration.ts @@ -0,0 +1,187 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * A Duration represents a signed, fixed-length span of time represented + * as a count of seconds and fractions of seconds at nanosecond + * resolution. It is independent of any calendar and concepts like "day" + * or "month". It is related to Timestamp in that the difference between + * two Timestamp values is a Duration and it can be added or subtracted + * from a Timestamp. Range is approximately +-10,000 years. + * + * # Examples + * + * Example 1: Compute Duration from two Timestamps in pseudo code. + * + * Timestamp start = ...; + * Timestamp end = ...; + * Duration duration = ...; + * + * duration.seconds = end.seconds - start.seconds; + * duration.nanos = end.nanos - start.nanos; + * + * if (duration.seconds < 0 && duration.nanos > 0) { + * duration.seconds += 1; + * duration.nanos -= 1000000000; + * } else if (duration.seconds > 0 && duration.nanos < 0) { + * duration.seconds -= 1; + * duration.nanos += 1000000000; + * } + * + * Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. 
+ * + * Timestamp start = ...; + * Duration duration = ...; + * Timestamp end = ...; + * + * end.seconds = start.seconds + duration.seconds; + * end.nanos = start.nanos + duration.nanos; + * + * if (end.nanos < 0) { + * end.seconds -= 1; + * end.nanos += 1000000000; + * } else if (end.nanos >= 1000000000) { + * end.seconds += 1; + * end.nanos -= 1000000000; + * } + * + * Example 3: Compute Duration from datetime.timedelta in Python. + * + * td = datetime.timedelta(days=3, minutes=10) + * duration = Duration() + * duration.FromTimedelta(td) + * + * # JSON Mapping + * + * In JSON format, the Duration type is encoded as a string rather than an + * object, where the string ends in the suffix "s" (indicating seconds) and + * is preceded by the number of seconds, with nanoseconds expressed as + * fractional seconds. For example, 3 seconds with 0 nanoseconds should be + * encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should + * be expressed in JSON format as "3.000000001s", and 3 seconds and 1 + * microsecond should be expressed in JSON format as "3.000001s". + */ +export interface Duration { + /** + * Signed seconds of the span of time. Must be from -315,576,000,000 + * to +315,576,000,000 inclusive. Note: these bounds are computed from: + * 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + */ + seconds: number; + /** + * Signed fractions of a second at nanosecond resolution of the span + * of time. Durations less than one second are represented with a 0 + * `seconds` field and a positive or negative `nanos` field. For durations + * of one second or more, a non-zero value for the `nanos` field must be + * of the same sign as the `seconds` field. Must be from -999,999,999 + * to +999,999,999 inclusive. 
+ */ + nanos: number; +} + +function createBaseDuration(): Duration { + return { seconds: 0, nanos: 0 }; +} + +export const Duration = { + encode(message: Duration, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.seconds !== 0) { + writer.uint32(8).int64(message.seconds); + } + if (message.nanos !== 0) { + writer.uint32(16).int32(message.nanos); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Duration { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDuration(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.seconds = longToNumber(reader.int64() as Long); + break; + case 2: + message.nanos = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Duration { + return { + seconds: isSet(object.seconds) ? Number(object.seconds) : 0, + nanos: isSet(object.nanos) ? Number(object.nanos) : 0, + }; + }, + + toJSON(message: Duration): unknown { + const obj: any = {}; + message.seconds !== undefined && (obj.seconds = Math.round(message.seconds)); + message.nanos !== undefined && (obj.nanos = Math.round(message.nanos)); + return obj; + }, + + fromPartial, I>>(object: I): Duration { + const message = createBaseDuration(); + message.seconds = object.seconds ?? 0; + message.nanos = object.nanos ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.tx.v1beta1/types/google/protobuf/timestamp.ts b/ts-client/cosmos.tx.v1beta1/types/google/protobuf/timestamp.ts new file mode 100644 index 000000000..b00bb1b72 --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/google/protobuf/timestamp.ts @@ -0,0 +1,216 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * A Timestamp represents a point in time independent of any time zone or local + * calendar, encoded as a count of seconds and fractions of seconds at + * nanosecond resolution. 
The count is relative to an epoch at UTC midnight on + * January 1, 1970, in the proleptic Gregorian calendar which extends the + * Gregorian calendar backwards to year one. + * + * All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap + * second table is needed for interpretation, using a [24-hour linear + * smear](https://developers.google.com/time/smear). + * + * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By + * restricting to that range, we ensure that we can convert to and from [RFC + * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + * + * # Examples + * + * Example 1: Compute Timestamp from POSIX `time()`. + * + * Timestamp timestamp; + * timestamp.set_seconds(time(NULL)); + * timestamp.set_nanos(0); + * + * Example 2: Compute Timestamp from POSIX `gettimeofday()`. + * + * struct timeval tv; + * gettimeofday(&tv, NULL); + * + * Timestamp timestamp; + * timestamp.set_seconds(tv.tv_sec); + * timestamp.set_nanos(tv.tv_usec * 1000); + * + * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + * + * FILETIME ft; + * GetSystemTimeAsFileTime(&ft); + * UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + * + * // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + * // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. + * Timestamp timestamp; + * timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + * timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + * + * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + * + * long millis = System.currentTimeMillis(); + * + * Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + * .setNanos((int) ((millis % 1000) * 1000000)).build(); + * + * Example 5: Compute Timestamp from Java `Instant.now()`. 
+ * + * Instant now = Instant.now(); + * + * Timestamp timestamp = + * Timestamp.newBuilder().setSeconds(now.getEpochSecond()) + * .setNanos(now.getNano()).build(); + * + * Example 6: Compute Timestamp from current time in Python. + * + * timestamp = Timestamp() + * timestamp.GetCurrentTime() + * + * # JSON Mapping + * + * In JSON format, the Timestamp type is encoded as a string in the + * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the + * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" + * where {year} is always expressed using four digits while {month}, {day}, + * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional + * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone + * is required. A proto3 JSON serializer should always use UTC (as indicated by + * "Z") when printing the Timestamp type and a proto3 JSON parser should be + * able to accept both UTC and other timezones (as indicated by an offset). + * + * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + * 01:30 UTC on January 15, 2017. + * + * In JavaScript, one can convert a Date object to this format using the + * standard + * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) + * method. In Python, a standard `datetime.datetime` object can be converted + * to this format using + * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with + * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use + * the Joda Time's [`ISODateTimeFormat.dateTime()`]( + * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D + * ) to obtain a formatter capable of generating timestamps in this format. 
+ */ +export interface Timestamp { + /** + * Represents seconds of UTC time since Unix epoch + * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + * 9999-12-31T23:59:59Z inclusive. + */ + seconds: number; + /** + * Non-negative fractions of a second at nanosecond resolution. Negative + * second values with fractions must still have non-negative nanos values + * that count forward in time. Must be from 0 to 999,999,999 + * inclusive. + */ + nanos: number; +} + +function createBaseTimestamp(): Timestamp { + return { seconds: 0, nanos: 0 }; +} + +export const Timestamp = { + encode(message: Timestamp, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.seconds !== 0) { + writer.uint32(8).int64(message.seconds); + } + if (message.nanos !== 0) { + writer.uint32(16).int32(message.nanos); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Timestamp { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTimestamp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.seconds = longToNumber(reader.int64() as Long); + break; + case 2: + message.nanos = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Timestamp { + return { + seconds: isSet(object.seconds) ? Number(object.seconds) : 0, + nanos: isSet(object.nanos) ? Number(object.nanos) : 0, + }; + }, + + toJSON(message: Timestamp): unknown { + const obj: any = {}; + message.seconds !== undefined && (obj.seconds = Math.round(message.seconds)); + message.nanos !== undefined && (obj.nanos = Math.round(message.nanos)); + return obj; + }, + + fromPartial, I>>(object: I): Timestamp { + const message = createBaseTimestamp(); + message.seconds = object.seconds ?? 0; + message.nanos = object.nanos ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.tx.v1beta1/types/tendermint/abci/types.ts b/ts-client/cosmos.tx.v1beta1/types/tendermint/abci/types.ts new file mode 100644 index 000000000..a5db53b4c --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/tendermint/abci/types.ts @@ -0,0 +1,4525 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { Timestamp } from "../../google/protobuf/timestamp"; +import { PublicKey } from "../crypto/keys"; +import { ProofOps } from "../crypto/proof"; +import { ConsensusParams } from "../types/params"; +import { Header } from "../types/types"; + +export const protobufPackage = "tendermint.abci"; + +export enum CheckTxType { + NEW = 0, + RECHECK = 
1, + UNRECOGNIZED = -1, +} + +export function checkTxTypeFromJSON(object: any): CheckTxType { + switch (object) { + case 0: + case "NEW": + return CheckTxType.NEW; + case 1: + case "RECHECK": + return CheckTxType.RECHECK; + case -1: + case "UNRECOGNIZED": + default: + return CheckTxType.UNRECOGNIZED; + } +} + +export function checkTxTypeToJSON(object: CheckTxType): string { + switch (object) { + case CheckTxType.NEW: + return "NEW"; + case CheckTxType.RECHECK: + return "RECHECK"; + case CheckTxType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum MisbehaviorType { + UNKNOWN = 0, + DUPLICATE_VOTE = 1, + LIGHT_CLIENT_ATTACK = 2, + UNRECOGNIZED = -1, +} + +export function misbehaviorTypeFromJSON(object: any): MisbehaviorType { + switch (object) { + case 0: + case "UNKNOWN": + return MisbehaviorType.UNKNOWN; + case 1: + case "DUPLICATE_VOTE": + return MisbehaviorType.DUPLICATE_VOTE; + case 2: + case "LIGHT_CLIENT_ATTACK": + return MisbehaviorType.LIGHT_CLIENT_ATTACK; + case -1: + case "UNRECOGNIZED": + default: + return MisbehaviorType.UNRECOGNIZED; + } +} + +export function misbehaviorTypeToJSON(object: MisbehaviorType): string { + switch (object) { + case MisbehaviorType.UNKNOWN: + return "UNKNOWN"; + case MisbehaviorType.DUPLICATE_VOTE: + return "DUPLICATE_VOTE"; + case MisbehaviorType.LIGHT_CLIENT_ATTACK: + return "LIGHT_CLIENT_ATTACK"; + case MisbehaviorType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface Request { + echo: RequestEcho | undefined; + flush: RequestFlush | undefined; + info: RequestInfo | undefined; + initChain: RequestInitChain | undefined; + query: RequestQuery | undefined; + beginBlock: RequestBeginBlock | undefined; + checkTx: RequestCheckTx | undefined; + deliverTx: RequestDeliverTx | undefined; + endBlock: RequestEndBlock | undefined; + commit: RequestCommit | undefined; + listSnapshots: RequestListSnapshots | undefined; + offerSnapshot: RequestOfferSnapshot | undefined; + loadSnapshotChunk: 
RequestLoadSnapshotChunk | undefined; + applySnapshotChunk: RequestApplySnapshotChunk | undefined; + prepareProposal: RequestPrepareProposal | undefined; + processProposal: RequestProcessProposal | undefined; +} + +export interface RequestEcho { + message: string; +} + +export interface RequestFlush { +} + +export interface RequestInfo { + version: string; + blockVersion: number; + p2pVersion: number; + abciVersion: string; +} + +export interface RequestInitChain { + time: Date | undefined; + chainId: string; + consensusParams: ConsensusParams | undefined; + validators: ValidatorUpdate[]; + appStateBytes: Uint8Array; + initialHeight: number; +} + +export interface RequestQuery { + data: Uint8Array; + path: string; + height: number; + prove: boolean; +} + +export interface RequestBeginBlock { + hash: Uint8Array; + header: Header | undefined; + lastCommitInfo: CommitInfo | undefined; + byzantineValidators: Misbehavior[]; +} + +export interface RequestCheckTx { + tx: Uint8Array; + type: CheckTxType; +} + +export interface RequestDeliverTx { + tx: Uint8Array; +} + +export interface RequestEndBlock { + height: number; +} + +export interface RequestCommit { +} + +/** lists available snapshots */ +export interface RequestListSnapshots { +} + +/** offers a snapshot to the application */ +export interface RequestOfferSnapshot { + /** snapshot offered by peers */ + snapshot: + | Snapshot + | undefined; + /** light client-verified app hash for snapshot height */ + appHash: Uint8Array; +} + +/** loads a snapshot chunk */ +export interface RequestLoadSnapshotChunk { + height: number; + format: number; + chunk: number; +} + +/** Applies a snapshot chunk */ +export interface RequestApplySnapshotChunk { + index: number; + chunk: Uint8Array; + sender: string; +} + +export interface RequestPrepareProposal { + /** the modified transactions cannot exceed this size. 
*/ + maxTxBytes: number; + /** + * txs is an array of transactions that will be included in a block, + * sent to the app for possible modifications. + */ + txs: Uint8Array[]; + localLastCommit: ExtendedCommitInfo | undefined; + misbehavior: Misbehavior[]; + height: number; + time: Date | undefined; + nextValidatorsHash: Uint8Array; + /** address of the public key of the validator proposing the block. */ + proposerAddress: Uint8Array; +} + +export interface RequestProcessProposal { + txs: Uint8Array[]; + proposedLastCommit: CommitInfo | undefined; + misbehavior: Misbehavior[]; + /** hash is the merkle root hash of the fields of the proposed block. */ + hash: Uint8Array; + height: number; + time: Date | undefined; + nextValidatorsHash: Uint8Array; + /** address of the public key of the original proposer of the block. */ + proposerAddress: Uint8Array; +} + +export interface Response { + exception: ResponseException | undefined; + echo: ResponseEcho | undefined; + flush: ResponseFlush | undefined; + info: ResponseInfo | undefined; + initChain: ResponseInitChain | undefined; + query: ResponseQuery | undefined; + beginBlock: ResponseBeginBlock | undefined; + checkTx: ResponseCheckTx | undefined; + deliverTx: ResponseDeliverTx | undefined; + endBlock: ResponseEndBlock | undefined; + commit: ResponseCommit | undefined; + listSnapshots: ResponseListSnapshots | undefined; + offerSnapshot: ResponseOfferSnapshot | undefined; + loadSnapshotChunk: ResponseLoadSnapshotChunk | undefined; + applySnapshotChunk: ResponseApplySnapshotChunk | undefined; + prepareProposal: ResponsePrepareProposal | undefined; + processProposal: ResponseProcessProposal | undefined; +} + +/** nondeterministic */ +export interface ResponseException { + error: string; +} + +export interface ResponseEcho { + message: string; +} + +export interface ResponseFlush { +} + +export interface ResponseInfo { + data: string; + version: string; + appVersion: number; + lastBlockHeight: number; + lastBlockAppHash: 
Uint8Array; +} + +export interface ResponseInitChain { + consensusParams: ConsensusParams | undefined; + validators: ValidatorUpdate[]; + appHash: Uint8Array; +} + +export interface ResponseQuery { + code: number; + /** bytes data = 2; // use "value" instead. */ + log: string; + /** nondeterministic */ + info: string; + index: number; + key: Uint8Array; + value: Uint8Array; + proofOps: ProofOps | undefined; + height: number; + codespace: string; +} + +export interface ResponseBeginBlock { + events: Event[]; +} + +export interface ResponseCheckTx { + code: number; + data: Uint8Array; + /** nondeterministic */ + log: string; + /** nondeterministic */ + info: string; + gasWanted: number; + gasUsed: number; + events: Event[]; + codespace: string; + sender: string; + priority: number; + /** + * mempool_error is set by CometBFT. + * ABCI applictions creating a ResponseCheckTX should not set mempool_error. + */ + mempoolError: string; +} + +export interface ResponseDeliverTx { + code: number; + data: Uint8Array; + /** nondeterministic */ + log: string; + /** nondeterministic */ + info: string; + gasWanted: number; + gasUsed: number; + /** nondeterministic */ + events: Event[]; + codespace: string; +} + +export interface ResponseEndBlock { + validatorUpdates: ValidatorUpdate[]; + consensusParamUpdates: ConsensusParams | undefined; + events: Event[]; +} + +export interface ResponseCommit { + /** reserve 1 */ + data: Uint8Array; + retainHeight: number; +} + +export interface ResponseListSnapshots { + snapshots: Snapshot[]; +} + +export interface ResponseOfferSnapshot { + result: ResponseOfferSnapshot_Result; +} + +export enum ResponseOfferSnapshot_Result { + /** UNKNOWN - Unknown result, abort all snapshot restoration */ + UNKNOWN = 0, + /** ACCEPT - Snapshot accepted, apply chunks */ + ACCEPT = 1, + /** ABORT - Abort all snapshot restoration */ + ABORT = 2, + /** REJECT - Reject this specific snapshot, try others */ + REJECT = 3, + /** REJECT_FORMAT - Reject all snapshots 
of this format, try others */ + REJECT_FORMAT = 4, + /** REJECT_SENDER - Reject all snapshots from the sender(s), try others */ + REJECT_SENDER = 5, + UNRECOGNIZED = -1, +} + +export function responseOfferSnapshot_ResultFromJSON(object: any): ResponseOfferSnapshot_Result { + switch (object) { + case 0: + case "UNKNOWN": + return ResponseOfferSnapshot_Result.UNKNOWN; + case 1: + case "ACCEPT": + return ResponseOfferSnapshot_Result.ACCEPT; + case 2: + case "ABORT": + return ResponseOfferSnapshot_Result.ABORT; + case 3: + case "REJECT": + return ResponseOfferSnapshot_Result.REJECT; + case 4: + case "REJECT_FORMAT": + return ResponseOfferSnapshot_Result.REJECT_FORMAT; + case 5: + case "REJECT_SENDER": + return ResponseOfferSnapshot_Result.REJECT_SENDER; + case -1: + case "UNRECOGNIZED": + default: + return ResponseOfferSnapshot_Result.UNRECOGNIZED; + } +} + +export function responseOfferSnapshot_ResultToJSON(object: ResponseOfferSnapshot_Result): string { + switch (object) { + case ResponseOfferSnapshot_Result.UNKNOWN: + return "UNKNOWN"; + case ResponseOfferSnapshot_Result.ACCEPT: + return "ACCEPT"; + case ResponseOfferSnapshot_Result.ABORT: + return "ABORT"; + case ResponseOfferSnapshot_Result.REJECT: + return "REJECT"; + case ResponseOfferSnapshot_Result.REJECT_FORMAT: + return "REJECT_FORMAT"; + case ResponseOfferSnapshot_Result.REJECT_SENDER: + return "REJECT_SENDER"; + case ResponseOfferSnapshot_Result.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface ResponseLoadSnapshotChunk { + chunk: Uint8Array; +} + +export interface ResponseApplySnapshotChunk { + result: ResponseApplySnapshotChunk_Result; + /** Chunks to refetch and reapply */ + refetchChunks: number[]; + /** Chunk senders to reject and ban */ + rejectSenders: string[]; +} + +export enum ResponseApplySnapshotChunk_Result { + /** UNKNOWN - Unknown result, abort all snapshot restoration */ + UNKNOWN = 0, + /** ACCEPT - Chunk successfully accepted */ + ACCEPT = 1, + /** ABORT - Abort 
all snapshot restoration */ + ABORT = 2, + /** RETRY - Retry chunk (combine with refetch and reject) */ + RETRY = 3, + /** RETRY_SNAPSHOT - Retry snapshot (combine with refetch and reject) */ + RETRY_SNAPSHOT = 4, + /** REJECT_SNAPSHOT - Reject this snapshot, try others */ + REJECT_SNAPSHOT = 5, + UNRECOGNIZED = -1, +} + +export function responseApplySnapshotChunk_ResultFromJSON(object: any): ResponseApplySnapshotChunk_Result { + switch (object) { + case 0: + case "UNKNOWN": + return ResponseApplySnapshotChunk_Result.UNKNOWN; + case 1: + case "ACCEPT": + return ResponseApplySnapshotChunk_Result.ACCEPT; + case 2: + case "ABORT": + return ResponseApplySnapshotChunk_Result.ABORT; + case 3: + case "RETRY": + return ResponseApplySnapshotChunk_Result.RETRY; + case 4: + case "RETRY_SNAPSHOT": + return ResponseApplySnapshotChunk_Result.RETRY_SNAPSHOT; + case 5: + case "REJECT_SNAPSHOT": + return ResponseApplySnapshotChunk_Result.REJECT_SNAPSHOT; + case -1: + case "UNRECOGNIZED": + default: + return ResponseApplySnapshotChunk_Result.UNRECOGNIZED; + } +} + +export function responseApplySnapshotChunk_ResultToJSON(object: ResponseApplySnapshotChunk_Result): string { + switch (object) { + case ResponseApplySnapshotChunk_Result.UNKNOWN: + return "UNKNOWN"; + case ResponseApplySnapshotChunk_Result.ACCEPT: + return "ACCEPT"; + case ResponseApplySnapshotChunk_Result.ABORT: + return "ABORT"; + case ResponseApplySnapshotChunk_Result.RETRY: + return "RETRY"; + case ResponseApplySnapshotChunk_Result.RETRY_SNAPSHOT: + return "RETRY_SNAPSHOT"; + case ResponseApplySnapshotChunk_Result.REJECT_SNAPSHOT: + return "REJECT_SNAPSHOT"; + case ResponseApplySnapshotChunk_Result.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface ResponsePrepareProposal { + txs: Uint8Array[]; +} + +export interface ResponseProcessProposal { + status: ResponseProcessProposal_ProposalStatus; +} + +export enum ResponseProcessProposal_ProposalStatus { + UNKNOWN = 0, + ACCEPT = 1, + REJECT = 2, 
+ UNRECOGNIZED = -1, +} + +export function responseProcessProposal_ProposalStatusFromJSON(object: any): ResponseProcessProposal_ProposalStatus { + switch (object) { + case 0: + case "UNKNOWN": + return ResponseProcessProposal_ProposalStatus.UNKNOWN; + case 1: + case "ACCEPT": + return ResponseProcessProposal_ProposalStatus.ACCEPT; + case 2: + case "REJECT": + return ResponseProcessProposal_ProposalStatus.REJECT; + case -1: + case "UNRECOGNIZED": + default: + return ResponseProcessProposal_ProposalStatus.UNRECOGNIZED; + } +} + +export function responseProcessProposal_ProposalStatusToJSON(object: ResponseProcessProposal_ProposalStatus): string { + switch (object) { + case ResponseProcessProposal_ProposalStatus.UNKNOWN: + return "UNKNOWN"; + case ResponseProcessProposal_ProposalStatus.ACCEPT: + return "ACCEPT"; + case ResponseProcessProposal_ProposalStatus.REJECT: + return "REJECT"; + case ResponseProcessProposal_ProposalStatus.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface CommitInfo { + round: number; + votes: VoteInfo[]; +} + +export interface ExtendedCommitInfo { + /** The round at which the block proposer decided in the previous height. */ + round: number; + /** + * List of validators' addresses in the last validator set with their voting + * information, including vote extensions. + */ + votes: ExtendedVoteInfo[]; +} + +/** + * Event allows application developers to attach additional information to + * ResponseBeginBlock, ResponseEndBlock, ResponseCheckTx and ResponseDeliverTx. + * Later, transactions may be queried using these events. + */ +export interface Event { + type: string; + attributes: EventAttribute[]; +} + +/** EventAttribute is a single key-value pair, associated with an event. */ +export interface EventAttribute { + key: string; + value: string; + /** nondeterministic */ + index: boolean; +} + +/** + * TxResult contains results of executing the transaction. + * + * One usage is indexing transaction results. 
+ */ +export interface TxResult { + height: number; + index: number; + tx: Uint8Array; + result: ResponseDeliverTx | undefined; +} + +/** Validator */ +export interface Validator { + /** The first 20 bytes of SHA256(public key) */ + address: Uint8Array; + /** PubKey pub_key = 2 [(gogoproto.nullable)=false]; */ + power: number; +} + +/** ValidatorUpdate */ +export interface ValidatorUpdate { + pubKey: PublicKey | undefined; + power: number; +} + +/** VoteInfo */ +export interface VoteInfo { + validator: Validator | undefined; + signedLastBlock: boolean; +} + +export interface ExtendedVoteInfo { + validator: Validator | undefined; + signedLastBlock: boolean; + /** Reserved for future use */ + voteExtension: Uint8Array; +} + +export interface Misbehavior { + type: MisbehaviorType; + /** The offending validator */ + validator: + | Validator + | undefined; + /** The height when the offense occurred */ + height: number; + /** The corresponding time where the offense occurred */ + time: + | Date + | undefined; + /** + * Total voting power of the validator set in case the ABCI application does + * not store historical validators. 
+ * https://github.com/tendermint/tendermint/issues/4581 + */ + totalVotingPower: number; +} + +export interface Snapshot { + /** The height at which the snapshot was taken */ + height: number; + /** The application-specific snapshot format */ + format: number; + /** Number of chunks in the snapshot */ + chunks: number; + /** Arbitrary snapshot hash, equal only if identical */ + hash: Uint8Array; + /** Arbitrary application metadata */ + metadata: Uint8Array; +} + +function createBaseRequest(): Request { + return { + echo: undefined, + flush: undefined, + info: undefined, + initChain: undefined, + query: undefined, + beginBlock: undefined, + checkTx: undefined, + deliverTx: undefined, + endBlock: undefined, + commit: undefined, + listSnapshots: undefined, + offerSnapshot: undefined, + loadSnapshotChunk: undefined, + applySnapshotChunk: undefined, + prepareProposal: undefined, + processProposal: undefined, + }; +} + +export const Request = { + encode(message: Request, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.echo !== undefined) { + RequestEcho.encode(message.echo, writer.uint32(10).fork()).ldelim(); + } + if (message.flush !== undefined) { + RequestFlush.encode(message.flush, writer.uint32(18).fork()).ldelim(); + } + if (message.info !== undefined) { + RequestInfo.encode(message.info, writer.uint32(26).fork()).ldelim(); + } + if (message.initChain !== undefined) { + RequestInitChain.encode(message.initChain, writer.uint32(42).fork()).ldelim(); + } + if (message.query !== undefined) { + RequestQuery.encode(message.query, writer.uint32(50).fork()).ldelim(); + } + if (message.beginBlock !== undefined) { + RequestBeginBlock.encode(message.beginBlock, writer.uint32(58).fork()).ldelim(); + } + if (message.checkTx !== undefined) { + RequestCheckTx.encode(message.checkTx, writer.uint32(66).fork()).ldelim(); + } + if (message.deliverTx !== undefined) { + RequestDeliverTx.encode(message.deliverTx, writer.uint32(74).fork()).ldelim(); + } + if 
(message.endBlock !== undefined) { + RequestEndBlock.encode(message.endBlock, writer.uint32(82).fork()).ldelim(); + } + if (message.commit !== undefined) { + RequestCommit.encode(message.commit, writer.uint32(90).fork()).ldelim(); + } + if (message.listSnapshots !== undefined) { + RequestListSnapshots.encode(message.listSnapshots, writer.uint32(98).fork()).ldelim(); + } + if (message.offerSnapshot !== undefined) { + RequestOfferSnapshot.encode(message.offerSnapshot, writer.uint32(106).fork()).ldelim(); + } + if (message.loadSnapshotChunk !== undefined) { + RequestLoadSnapshotChunk.encode(message.loadSnapshotChunk, writer.uint32(114).fork()).ldelim(); + } + if (message.applySnapshotChunk !== undefined) { + RequestApplySnapshotChunk.encode(message.applySnapshotChunk, writer.uint32(122).fork()).ldelim(); + } + if (message.prepareProposal !== undefined) { + RequestPrepareProposal.encode(message.prepareProposal, writer.uint32(130).fork()).ldelim(); + } + if (message.processProposal !== undefined) { + RequestProcessProposal.encode(message.processProposal, writer.uint32(138).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Request { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.echo = RequestEcho.decode(reader, reader.uint32()); + break; + case 2: + message.flush = RequestFlush.decode(reader, reader.uint32()); + break; + case 3: + message.info = RequestInfo.decode(reader, reader.uint32()); + break; + case 5: + message.initChain = RequestInitChain.decode(reader, reader.uint32()); + break; + case 6: + message.query = RequestQuery.decode(reader, reader.uint32()); + break; + case 7: + message.beginBlock = RequestBeginBlock.decode(reader, reader.uint32()); + break; + case 8: + message.checkTx = RequestCheckTx.decode(reader, reader.uint32()); + break; + case 9: + message.deliverTx = RequestDeliverTx.decode(reader, reader.uint32()); + break; + case 10: + message.endBlock = RequestEndBlock.decode(reader, reader.uint32()); + break; + case 11: + message.commit = RequestCommit.decode(reader, reader.uint32()); + break; + case 12: + message.listSnapshots = RequestListSnapshots.decode(reader, reader.uint32()); + break; + case 13: + message.offerSnapshot = RequestOfferSnapshot.decode(reader, reader.uint32()); + break; + case 14: + message.loadSnapshotChunk = RequestLoadSnapshotChunk.decode(reader, reader.uint32()); + break; + case 15: + message.applySnapshotChunk = RequestApplySnapshotChunk.decode(reader, reader.uint32()); + break; + case 16: + message.prepareProposal = RequestPrepareProposal.decode(reader, reader.uint32()); + break; + case 17: + message.processProposal = RequestProcessProposal.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Request { + return { + echo: isSet(object.echo) ? RequestEcho.fromJSON(object.echo) : undefined, + flush: isSet(object.flush) ? RequestFlush.fromJSON(object.flush) : undefined, + info: isSet(object.info) ? 
RequestInfo.fromJSON(object.info) : undefined, + initChain: isSet(object.initChain) ? RequestInitChain.fromJSON(object.initChain) : undefined, + query: isSet(object.query) ? RequestQuery.fromJSON(object.query) : undefined, + beginBlock: isSet(object.beginBlock) ? RequestBeginBlock.fromJSON(object.beginBlock) : undefined, + checkTx: isSet(object.checkTx) ? RequestCheckTx.fromJSON(object.checkTx) : undefined, + deliverTx: isSet(object.deliverTx) ? RequestDeliverTx.fromJSON(object.deliverTx) : undefined, + endBlock: isSet(object.endBlock) ? RequestEndBlock.fromJSON(object.endBlock) : undefined, + commit: isSet(object.commit) ? RequestCommit.fromJSON(object.commit) : undefined, + listSnapshots: isSet(object.listSnapshots) ? RequestListSnapshots.fromJSON(object.listSnapshots) : undefined, + offerSnapshot: isSet(object.offerSnapshot) ? RequestOfferSnapshot.fromJSON(object.offerSnapshot) : undefined, + loadSnapshotChunk: isSet(object.loadSnapshotChunk) + ? RequestLoadSnapshotChunk.fromJSON(object.loadSnapshotChunk) + : undefined, + applySnapshotChunk: isSet(object.applySnapshotChunk) + ? RequestApplySnapshotChunk.fromJSON(object.applySnapshotChunk) + : undefined, + prepareProposal: isSet(object.prepareProposal) + ? RequestPrepareProposal.fromJSON(object.prepareProposal) + : undefined, + processProposal: isSet(object.processProposal) + ? RequestProcessProposal.fromJSON(object.processProposal) + : undefined, + }; + }, + + toJSON(message: Request): unknown { + const obj: any = {}; + message.echo !== undefined && (obj.echo = message.echo ? RequestEcho.toJSON(message.echo) : undefined); + message.flush !== undefined && (obj.flush = message.flush ? RequestFlush.toJSON(message.flush) : undefined); + message.info !== undefined && (obj.info = message.info ? RequestInfo.toJSON(message.info) : undefined); + message.initChain !== undefined + && (obj.initChain = message.initChain ? 
RequestInitChain.toJSON(message.initChain) : undefined); + message.query !== undefined && (obj.query = message.query ? RequestQuery.toJSON(message.query) : undefined); + message.beginBlock !== undefined + && (obj.beginBlock = message.beginBlock ? RequestBeginBlock.toJSON(message.beginBlock) : undefined); + message.checkTx !== undefined + && (obj.checkTx = message.checkTx ? RequestCheckTx.toJSON(message.checkTx) : undefined); + message.deliverTx !== undefined + && (obj.deliverTx = message.deliverTx ? RequestDeliverTx.toJSON(message.deliverTx) : undefined); + message.endBlock !== undefined + && (obj.endBlock = message.endBlock ? RequestEndBlock.toJSON(message.endBlock) : undefined); + message.commit !== undefined && (obj.commit = message.commit ? RequestCommit.toJSON(message.commit) : undefined); + message.listSnapshots !== undefined + && (obj.listSnapshots = message.listSnapshots ? RequestListSnapshots.toJSON(message.listSnapshots) : undefined); + message.offerSnapshot !== undefined + && (obj.offerSnapshot = message.offerSnapshot ? RequestOfferSnapshot.toJSON(message.offerSnapshot) : undefined); + message.loadSnapshotChunk !== undefined && (obj.loadSnapshotChunk = message.loadSnapshotChunk + ? RequestLoadSnapshotChunk.toJSON(message.loadSnapshotChunk) + : undefined); + message.applySnapshotChunk !== undefined && (obj.applySnapshotChunk = message.applySnapshotChunk + ? RequestApplySnapshotChunk.toJSON(message.applySnapshotChunk) + : undefined); + message.prepareProposal !== undefined && (obj.prepareProposal = message.prepareProposal + ? RequestPrepareProposal.toJSON(message.prepareProposal) + : undefined); + message.processProposal !== undefined && (obj.processProposal = message.processProposal + ? RequestProcessProposal.toJSON(message.processProposal) + : undefined); + return obj; + }, + + fromPartial, I>>(object: I): Request { + const message = createBaseRequest(); + message.echo = (object.echo !== undefined && object.echo !== null) + ? 
RequestEcho.fromPartial(object.echo) + : undefined; + message.flush = (object.flush !== undefined && object.flush !== null) + ? RequestFlush.fromPartial(object.flush) + : undefined; + message.info = (object.info !== undefined && object.info !== null) + ? RequestInfo.fromPartial(object.info) + : undefined; + message.initChain = (object.initChain !== undefined && object.initChain !== null) + ? RequestInitChain.fromPartial(object.initChain) + : undefined; + message.query = (object.query !== undefined && object.query !== null) + ? RequestQuery.fromPartial(object.query) + : undefined; + message.beginBlock = (object.beginBlock !== undefined && object.beginBlock !== null) + ? RequestBeginBlock.fromPartial(object.beginBlock) + : undefined; + message.checkTx = (object.checkTx !== undefined && object.checkTx !== null) + ? RequestCheckTx.fromPartial(object.checkTx) + : undefined; + message.deliverTx = (object.deliverTx !== undefined && object.deliverTx !== null) + ? RequestDeliverTx.fromPartial(object.deliverTx) + : undefined; + message.endBlock = (object.endBlock !== undefined && object.endBlock !== null) + ? RequestEndBlock.fromPartial(object.endBlock) + : undefined; + message.commit = (object.commit !== undefined && object.commit !== null) + ? RequestCommit.fromPartial(object.commit) + : undefined; + message.listSnapshots = (object.listSnapshots !== undefined && object.listSnapshots !== null) + ? RequestListSnapshots.fromPartial(object.listSnapshots) + : undefined; + message.offerSnapshot = (object.offerSnapshot !== undefined && object.offerSnapshot !== null) + ? RequestOfferSnapshot.fromPartial(object.offerSnapshot) + : undefined; + message.loadSnapshotChunk = (object.loadSnapshotChunk !== undefined && object.loadSnapshotChunk !== null) + ? RequestLoadSnapshotChunk.fromPartial(object.loadSnapshotChunk) + : undefined; + message.applySnapshotChunk = (object.applySnapshotChunk !== undefined && object.applySnapshotChunk !== null) + ? 
RequestApplySnapshotChunk.fromPartial(object.applySnapshotChunk) + : undefined; + message.prepareProposal = (object.prepareProposal !== undefined && object.prepareProposal !== null) + ? RequestPrepareProposal.fromPartial(object.prepareProposal) + : undefined; + message.processProposal = (object.processProposal !== undefined && object.processProposal !== null) + ? RequestProcessProposal.fromPartial(object.processProposal) + : undefined; + return message; + }, +}; + +function createBaseRequestEcho(): RequestEcho { + return { message: "" }; +} + +export const RequestEcho = { + encode(message: RequestEcho, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.message !== "") { + writer.uint32(10).string(message.message); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestEcho { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRequestEcho(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.message = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): RequestEcho { + return { message: isSet(object.message) ? String(object.message) : "" }; + }, + + toJSON(message: RequestEcho): unknown { + const obj: any = {}; + message.message !== undefined && (obj.message = message.message); + return obj; + }, + + fromPartial, I>>(object: I): RequestEcho { + const message = createBaseRequestEcho(); + message.message = object.message ?? 
""; + return message; + }, +}; + +function createBaseRequestFlush(): RequestFlush { + return {}; +} + +export const RequestFlush = { + encode(_: RequestFlush, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestFlush { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRequestFlush(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): RequestFlush { + return {}; + }, + + toJSON(_: RequestFlush): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): RequestFlush { + const message = createBaseRequestFlush(); + return message; + }, +}; + +function createBaseRequestInfo(): RequestInfo { + return { version: "", blockVersion: 0, p2pVersion: 0, abciVersion: "" }; +} + +export const RequestInfo = { + encode(message: RequestInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.version !== "") { + writer.uint32(10).string(message.version); + } + if (message.blockVersion !== 0) { + writer.uint32(16).uint64(message.blockVersion); + } + if (message.p2pVersion !== 0) { + writer.uint32(24).uint64(message.p2pVersion); + } + if (message.abciVersion !== "") { + writer.uint32(34).string(message.abciVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseRequestInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.version = reader.string(); + break; + case 2: + message.blockVersion = longToNumber(reader.uint64() as Long); + break; + case 3: + message.p2pVersion = longToNumber(reader.uint64() as Long); + break; + case 4: + message.abciVersion = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): RequestInfo { + return { + version: isSet(object.version) ? String(object.version) : "", + blockVersion: isSet(object.blockVersion) ? Number(object.blockVersion) : 0, + p2pVersion: isSet(object.p2pVersion) ? Number(object.p2pVersion) : 0, + abciVersion: isSet(object.abciVersion) ? String(object.abciVersion) : "", + }; + }, + + toJSON(message: RequestInfo): unknown { + const obj: any = {}; + message.version !== undefined && (obj.version = message.version); + message.blockVersion !== undefined && (obj.blockVersion = Math.round(message.blockVersion)); + message.p2pVersion !== undefined && (obj.p2pVersion = Math.round(message.p2pVersion)); + message.abciVersion !== undefined && (obj.abciVersion = message.abciVersion); + return obj; + }, + + fromPartial, I>>(object: I): RequestInfo { + const message = createBaseRequestInfo(); + message.version = object.version ?? ""; + message.blockVersion = object.blockVersion ?? 0; + message.p2pVersion = object.p2pVersion ?? 0; + message.abciVersion = object.abciVersion ?? 
""; + return message; + }, +}; + +function createBaseRequestInitChain(): RequestInitChain { + return { + time: undefined, + chainId: "", + consensusParams: undefined, + validators: [], + appStateBytes: new Uint8Array(), + initialHeight: 0, + }; +} + +export const RequestInitChain = { + encode(message: RequestInitChain, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.time !== undefined) { + Timestamp.encode(toTimestamp(message.time), writer.uint32(10).fork()).ldelim(); + } + if (message.chainId !== "") { + writer.uint32(18).string(message.chainId); + } + if (message.consensusParams !== undefined) { + ConsensusParams.encode(message.consensusParams, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.validators) { + ValidatorUpdate.encode(v!, writer.uint32(34).fork()).ldelim(); + } + if (message.appStateBytes.length !== 0) { + writer.uint32(42).bytes(message.appStateBytes); + } + if (message.initialHeight !== 0) { + writer.uint32(48).int64(message.initialHeight); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestInitChain { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseRequestInitChain(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.time = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + case 2: + message.chainId = reader.string(); + break; + case 3: + message.consensusParams = ConsensusParams.decode(reader, reader.uint32()); + break; + case 4: + message.validators.push(ValidatorUpdate.decode(reader, reader.uint32())); + break; + case 5: + message.appStateBytes = reader.bytes(); + break; + case 6: + message.initialHeight = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): RequestInitChain { + return { + time: isSet(object.time) ? fromJsonTimestamp(object.time) : undefined, + chainId: isSet(object.chainId) ? String(object.chainId) : "", + consensusParams: isSet(object.consensusParams) ? ConsensusParams.fromJSON(object.consensusParams) : undefined, + validators: Array.isArray(object?.validators) + ? object.validators.map((e: any) => ValidatorUpdate.fromJSON(e)) + : [], + appStateBytes: isSet(object.appStateBytes) ? bytesFromBase64(object.appStateBytes) : new Uint8Array(), + initialHeight: isSet(object.initialHeight) ? Number(object.initialHeight) : 0, + }; + }, + + toJSON(message: RequestInitChain): unknown { + const obj: any = {}; + message.time !== undefined && (obj.time = message.time.toISOString()); + message.chainId !== undefined && (obj.chainId = message.chainId); + message.consensusParams !== undefined + && (obj.consensusParams = message.consensusParams ? ConsensusParams.toJSON(message.consensusParams) : undefined); + if (message.validators) { + obj.validators = message.validators.map((e) => e ? 
ValidatorUpdate.toJSON(e) : undefined); + } else { + obj.validators = []; + } + message.appStateBytes !== undefined + && (obj.appStateBytes = base64FromBytes( + message.appStateBytes !== undefined ? message.appStateBytes : new Uint8Array(), + )); + message.initialHeight !== undefined && (obj.initialHeight = Math.round(message.initialHeight)); + return obj; + }, + + fromPartial, I>>(object: I): RequestInitChain { + const message = createBaseRequestInitChain(); + message.time = object.time ?? undefined; + message.chainId = object.chainId ?? ""; + message.consensusParams = (object.consensusParams !== undefined && object.consensusParams !== null) + ? ConsensusParams.fromPartial(object.consensusParams) + : undefined; + message.validators = object.validators?.map((e) => ValidatorUpdate.fromPartial(e)) || []; + message.appStateBytes = object.appStateBytes ?? new Uint8Array(); + message.initialHeight = object.initialHeight ?? 0; + return message; + }, +}; + +function createBaseRequestQuery(): RequestQuery { + return { data: new Uint8Array(), path: "", height: 0, prove: false }; +} + +export const RequestQuery = { + encode(message: RequestQuery, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.data.length !== 0) { + writer.uint32(10).bytes(message.data); + } + if (message.path !== "") { + writer.uint32(18).string(message.path); + } + if (message.height !== 0) { + writer.uint32(24).int64(message.height); + } + if (message.prove === true) { + writer.uint32(32).bool(message.prove); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestQuery { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseRequestQuery(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.data = reader.bytes(); + break; + case 2: + message.path = reader.string(); + break; + case 3: + message.height = longToNumber(reader.int64() as Long); + break; + case 4: + message.prove = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): RequestQuery { + return { + data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(), + path: isSet(object.path) ? String(object.path) : "", + height: isSet(object.height) ? Number(object.height) : 0, + prove: isSet(object.prove) ? Boolean(object.prove) : false, + }; + }, + + toJSON(message: RequestQuery): unknown { + const obj: any = {}; + message.data !== undefined + && (obj.data = base64FromBytes(message.data !== undefined ? message.data : new Uint8Array())); + message.path !== undefined && (obj.path = message.path); + message.height !== undefined && (obj.height = Math.round(message.height)); + message.prove !== undefined && (obj.prove = message.prove); + return obj; + }, + + fromPartial, I>>(object: I): RequestQuery { + const message = createBaseRequestQuery(); + message.data = object.data ?? new Uint8Array(); + message.path = object.path ?? ""; + message.height = object.height ?? 0; + message.prove = object.prove ?? 
false; + return message; + }, +}; + +function createBaseRequestBeginBlock(): RequestBeginBlock { + return { hash: new Uint8Array(), header: undefined, lastCommitInfo: undefined, byzantineValidators: [] }; +} + +export const RequestBeginBlock = { + encode(message: RequestBeginBlock, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.hash.length !== 0) { + writer.uint32(10).bytes(message.hash); + } + if (message.header !== undefined) { + Header.encode(message.header, writer.uint32(18).fork()).ldelim(); + } + if (message.lastCommitInfo !== undefined) { + CommitInfo.encode(message.lastCommitInfo, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.byzantineValidators) { + Misbehavior.encode(v!, writer.uint32(34).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestBeginBlock { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRequestBeginBlock(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.hash = reader.bytes(); + break; + case 2: + message.header = Header.decode(reader, reader.uint32()); + break; + case 3: + message.lastCommitInfo = CommitInfo.decode(reader, reader.uint32()); + break; + case 4: + message.byzantineValidators.push(Misbehavior.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): RequestBeginBlock { + return { + hash: isSet(object.hash) ? bytesFromBase64(object.hash) : new Uint8Array(), + header: isSet(object.header) ? Header.fromJSON(object.header) : undefined, + lastCommitInfo: isSet(object.lastCommitInfo) ? CommitInfo.fromJSON(object.lastCommitInfo) : undefined, + byzantineValidators: Array.isArray(object?.byzantineValidators) + ? 
object.byzantineValidators.map((e: any) => Misbehavior.fromJSON(e)) + : [], + }; + }, + + toJSON(message: RequestBeginBlock): unknown { + const obj: any = {}; + message.hash !== undefined + && (obj.hash = base64FromBytes(message.hash !== undefined ? message.hash : new Uint8Array())); + message.header !== undefined && (obj.header = message.header ? Header.toJSON(message.header) : undefined); + message.lastCommitInfo !== undefined + && (obj.lastCommitInfo = message.lastCommitInfo ? CommitInfo.toJSON(message.lastCommitInfo) : undefined); + if (message.byzantineValidators) { + obj.byzantineValidators = message.byzantineValidators.map((e) => e ? Misbehavior.toJSON(e) : undefined); + } else { + obj.byzantineValidators = []; + } + return obj; + }, + + fromPartial, I>>(object: I): RequestBeginBlock { + const message = createBaseRequestBeginBlock(); + message.hash = object.hash ?? new Uint8Array(); + message.header = (object.header !== undefined && object.header !== null) + ? Header.fromPartial(object.header) + : undefined; + message.lastCommitInfo = (object.lastCommitInfo !== undefined && object.lastCommitInfo !== null) + ? CommitInfo.fromPartial(object.lastCommitInfo) + : undefined; + message.byzantineValidators = object.byzantineValidators?.map((e) => Misbehavior.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseRequestCheckTx(): RequestCheckTx { + return { tx: new Uint8Array(), type: 0 }; +} + +export const RequestCheckTx = { + encode(message: RequestCheckTx, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.tx.length !== 0) { + writer.uint32(10).bytes(message.tx); + } + if (message.type !== 0) { + writer.uint32(16).int32(message.type); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestCheckTx { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseRequestCheckTx(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.tx = reader.bytes(); + break; + case 2: + message.type = reader.int32() as any; + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): RequestCheckTx { + return { + tx: isSet(object.tx) ? bytesFromBase64(object.tx) : new Uint8Array(), + type: isSet(object.type) ? checkTxTypeFromJSON(object.type) : 0, + }; + }, + + toJSON(message: RequestCheckTx): unknown { + const obj: any = {}; + message.tx !== undefined && (obj.tx = base64FromBytes(message.tx !== undefined ? message.tx : new Uint8Array())); + message.type !== undefined && (obj.type = checkTxTypeToJSON(message.type)); + return obj; + }, + + fromPartial, I>>(object: I): RequestCheckTx { + const message = createBaseRequestCheckTx(); + message.tx = object.tx ?? new Uint8Array(); + message.type = object.type ?? 0; + return message; + }, +}; + +function createBaseRequestDeliverTx(): RequestDeliverTx { + return { tx: new Uint8Array() }; +} + +export const RequestDeliverTx = { + encode(message: RequestDeliverTx, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.tx.length !== 0) { + writer.uint32(10).bytes(message.tx); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestDeliverTx { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRequestDeliverTx(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.tx = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): RequestDeliverTx { + return { tx: isSet(object.tx) ? 
bytesFromBase64(object.tx) : new Uint8Array() }; + }, + + toJSON(message: RequestDeliverTx): unknown { + const obj: any = {}; + message.tx !== undefined && (obj.tx = base64FromBytes(message.tx !== undefined ? message.tx : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): RequestDeliverTx { + const message = createBaseRequestDeliverTx(); + message.tx = object.tx ?? new Uint8Array(); + return message; + }, +}; + +function createBaseRequestEndBlock(): RequestEndBlock { + return { height: 0 }; +} + +export const RequestEndBlock = { + encode(message: RequestEndBlock, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.height !== 0) { + writer.uint32(8).int64(message.height); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestEndBlock { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRequestEndBlock(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.height = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): RequestEndBlock { + return { height: isSet(object.height) ? Number(object.height) : 0 }; + }, + + toJSON(message: RequestEndBlock): unknown { + const obj: any = {}; + message.height !== undefined && (obj.height = Math.round(message.height)); + return obj; + }, + + fromPartial, I>>(object: I): RequestEndBlock { + const message = createBaseRequestEndBlock(); + message.height = object.height ?? 
0; + return message; + }, +}; + +function createBaseRequestCommit(): RequestCommit { + return {}; +} + +export const RequestCommit = { + encode(_: RequestCommit, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestCommit { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRequestCommit(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): RequestCommit { + return {}; + }, + + toJSON(_: RequestCommit): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): RequestCommit { + const message = createBaseRequestCommit(); + return message; + }, +}; + +function createBaseRequestListSnapshots(): RequestListSnapshots { + return {}; +} + +export const RequestListSnapshots = { + encode(_: RequestListSnapshots, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestListSnapshots { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseRequestListSnapshots(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): RequestListSnapshots { + return {}; + }, + + toJSON(_: RequestListSnapshots): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): RequestListSnapshots { + const message = createBaseRequestListSnapshots(); + return message; + }, +}; + +function createBaseRequestOfferSnapshot(): RequestOfferSnapshot { + return { snapshot: undefined, appHash: new Uint8Array() }; +} + +export const RequestOfferSnapshot = { + encode(message: RequestOfferSnapshot, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.snapshot !== undefined) { + Snapshot.encode(message.snapshot, writer.uint32(10).fork()).ldelim(); + } + if (message.appHash.length !== 0) { + writer.uint32(18).bytes(message.appHash); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestOfferSnapshot { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRequestOfferSnapshot(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.snapshot = Snapshot.decode(reader, reader.uint32()); + break; + case 2: + message.appHash = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): RequestOfferSnapshot { + return { + snapshot: isSet(object.snapshot) ? Snapshot.fromJSON(object.snapshot) : undefined, + appHash: isSet(object.appHash) ? bytesFromBase64(object.appHash) : new Uint8Array(), + }; + }, + + toJSON(message: RequestOfferSnapshot): unknown { + const obj: any = {}; + message.snapshot !== undefined && (obj.snapshot = message.snapshot ? 
Snapshot.toJSON(message.snapshot) : undefined); + message.appHash !== undefined + && (obj.appHash = base64FromBytes(message.appHash !== undefined ? message.appHash : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): RequestOfferSnapshot { + const message = createBaseRequestOfferSnapshot(); + message.snapshot = (object.snapshot !== undefined && object.snapshot !== null) + ? Snapshot.fromPartial(object.snapshot) + : undefined; + message.appHash = object.appHash ?? new Uint8Array(); + return message; + }, +}; + +function createBaseRequestLoadSnapshotChunk(): RequestLoadSnapshotChunk { + return { height: 0, format: 0, chunk: 0 }; +} + +export const RequestLoadSnapshotChunk = { + encode(message: RequestLoadSnapshotChunk, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.height !== 0) { + writer.uint32(8).uint64(message.height); + } + if (message.format !== 0) { + writer.uint32(16).uint32(message.format); + } + if (message.chunk !== 0) { + writer.uint32(24).uint32(message.chunk); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestLoadSnapshotChunk { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRequestLoadSnapshotChunk(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.height = longToNumber(reader.uint64() as Long); + break; + case 2: + message.format = reader.uint32(); + break; + case 3: + message.chunk = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): RequestLoadSnapshotChunk { + return { + height: isSet(object.height) ? Number(object.height) : 0, + format: isSet(object.format) ? Number(object.format) : 0, + chunk: isSet(object.chunk) ? 
Number(object.chunk) : 0, + }; + }, + + toJSON(message: RequestLoadSnapshotChunk): unknown { + const obj: any = {}; + message.height !== undefined && (obj.height = Math.round(message.height)); + message.format !== undefined && (obj.format = Math.round(message.format)); + message.chunk !== undefined && (obj.chunk = Math.round(message.chunk)); + return obj; + }, + + fromPartial, I>>(object: I): RequestLoadSnapshotChunk { + const message = createBaseRequestLoadSnapshotChunk(); + message.height = object.height ?? 0; + message.format = object.format ?? 0; + message.chunk = object.chunk ?? 0; + return message; + }, +}; + +function createBaseRequestApplySnapshotChunk(): RequestApplySnapshotChunk { + return { index: 0, chunk: new Uint8Array(), sender: "" }; +} + +export const RequestApplySnapshotChunk = { + encode(message: RequestApplySnapshotChunk, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.index !== 0) { + writer.uint32(8).uint32(message.index); + } + if (message.chunk.length !== 0) { + writer.uint32(18).bytes(message.chunk); + } + if (message.sender !== "") { + writer.uint32(26).string(message.sender); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestApplySnapshotChunk { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRequestApplySnapshotChunk(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.index = reader.uint32(); + break; + case 2: + message.chunk = reader.bytes(); + break; + case 3: + message.sender = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): RequestApplySnapshotChunk { + return { + index: isSet(object.index) ? Number(object.index) : 0, + chunk: isSet(object.chunk) ? 
bytesFromBase64(object.chunk) : new Uint8Array(), + sender: isSet(object.sender) ? String(object.sender) : "", + }; + }, + + toJSON(message: RequestApplySnapshotChunk): unknown { + const obj: any = {}; + message.index !== undefined && (obj.index = Math.round(message.index)); + message.chunk !== undefined + && (obj.chunk = base64FromBytes(message.chunk !== undefined ? message.chunk : new Uint8Array())); + message.sender !== undefined && (obj.sender = message.sender); + return obj; + }, + + fromPartial, I>>(object: I): RequestApplySnapshotChunk { + const message = createBaseRequestApplySnapshotChunk(); + message.index = object.index ?? 0; + message.chunk = object.chunk ?? new Uint8Array(); + message.sender = object.sender ?? ""; + return message; + }, +}; + +function createBaseRequestPrepareProposal(): RequestPrepareProposal { + return { + maxTxBytes: 0, + txs: [], + localLastCommit: undefined, + misbehavior: [], + height: 0, + time: undefined, + nextValidatorsHash: new Uint8Array(), + proposerAddress: new Uint8Array(), + }; +} + +export const RequestPrepareProposal = { + encode(message: RequestPrepareProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.maxTxBytes !== 0) { + writer.uint32(8).int64(message.maxTxBytes); + } + for (const v of message.txs) { + writer.uint32(18).bytes(v!); + } + if (message.localLastCommit !== undefined) { + ExtendedCommitInfo.encode(message.localLastCommit, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.misbehavior) { + Misbehavior.encode(v!, writer.uint32(34).fork()).ldelim(); + } + if (message.height !== 0) { + writer.uint32(40).int64(message.height); + } + if (message.time !== undefined) { + Timestamp.encode(toTimestamp(message.time), writer.uint32(50).fork()).ldelim(); + } + if (message.nextValidatorsHash.length !== 0) { + writer.uint32(58).bytes(message.nextValidatorsHash); + } + if (message.proposerAddress.length !== 0) { + writer.uint32(66).bytes(message.proposerAddress); + } + return 
writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestPrepareProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRequestPrepareProposal(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.maxTxBytes = longToNumber(reader.int64() as Long); + break; + case 2: + message.txs.push(reader.bytes()); + break; + case 3: + message.localLastCommit = ExtendedCommitInfo.decode(reader, reader.uint32()); + break; + case 4: + message.misbehavior.push(Misbehavior.decode(reader, reader.uint32())); + break; + case 5: + message.height = longToNumber(reader.int64() as Long); + break; + case 6: + message.time = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + case 7: + message.nextValidatorsHash = reader.bytes(); + break; + case 8: + message.proposerAddress = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): RequestPrepareProposal { + return { + maxTxBytes: isSet(object.maxTxBytes) ? Number(object.maxTxBytes) : 0, + txs: Array.isArray(object?.txs) ? object.txs.map((e: any) => bytesFromBase64(e)) : [], + localLastCommit: isSet(object.localLastCommit) ? ExtendedCommitInfo.fromJSON(object.localLastCommit) : undefined, + misbehavior: Array.isArray(object?.misbehavior) + ? object.misbehavior.map((e: any) => Misbehavior.fromJSON(e)) + : [], + height: isSet(object.height) ? Number(object.height) : 0, + time: isSet(object.time) ? fromJsonTimestamp(object.time) : undefined, + nextValidatorsHash: isSet(object.nextValidatorsHash) + ? bytesFromBase64(object.nextValidatorsHash) + : new Uint8Array(), + proposerAddress: isSet(object.proposerAddress) ? 
bytesFromBase64(object.proposerAddress) : new Uint8Array(), + }; + }, + + toJSON(message: RequestPrepareProposal): unknown { + const obj: any = {}; + message.maxTxBytes !== undefined && (obj.maxTxBytes = Math.round(message.maxTxBytes)); + if (message.txs) { + obj.txs = message.txs.map((e) => base64FromBytes(e !== undefined ? e : new Uint8Array())); + } else { + obj.txs = []; + } + message.localLastCommit !== undefined + && (obj.localLastCommit = message.localLastCommit + ? ExtendedCommitInfo.toJSON(message.localLastCommit) + : undefined); + if (message.misbehavior) { + obj.misbehavior = message.misbehavior.map((e) => e ? Misbehavior.toJSON(e) : undefined); + } else { + obj.misbehavior = []; + } + message.height !== undefined && (obj.height = Math.round(message.height)); + message.time !== undefined && (obj.time = message.time.toISOString()); + message.nextValidatorsHash !== undefined + && (obj.nextValidatorsHash = base64FromBytes( + message.nextValidatorsHash !== undefined ? message.nextValidatorsHash : new Uint8Array(), + )); + message.proposerAddress !== undefined + && (obj.proposerAddress = base64FromBytes( + message.proposerAddress !== undefined ? message.proposerAddress : new Uint8Array(), + )); + return obj; + }, + + fromPartial, I>>(object: I): RequestPrepareProposal { + const message = createBaseRequestPrepareProposal(); + message.maxTxBytes = object.maxTxBytes ?? 0; + message.txs = object.txs?.map((e) => e) || []; + message.localLastCommit = (object.localLastCommit !== undefined && object.localLastCommit !== null) + ? ExtendedCommitInfo.fromPartial(object.localLastCommit) + : undefined; + message.misbehavior = object.misbehavior?.map((e) => Misbehavior.fromPartial(e)) || []; + message.height = object.height ?? 0; + message.time = object.time ?? undefined; + message.nextValidatorsHash = object.nextValidatorsHash ?? new Uint8Array(); + message.proposerAddress = object.proposerAddress ?? 
new Uint8Array(); + return message; + }, +}; + +function createBaseRequestProcessProposal(): RequestProcessProposal { + return { + txs: [], + proposedLastCommit: undefined, + misbehavior: [], + hash: new Uint8Array(), + height: 0, + time: undefined, + nextValidatorsHash: new Uint8Array(), + proposerAddress: new Uint8Array(), + }; +} + +export const RequestProcessProposal = { + encode(message: RequestProcessProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.txs) { + writer.uint32(10).bytes(v!); + } + if (message.proposedLastCommit !== undefined) { + CommitInfo.encode(message.proposedLastCommit, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.misbehavior) { + Misbehavior.encode(v!, writer.uint32(26).fork()).ldelim(); + } + if (message.hash.length !== 0) { + writer.uint32(34).bytes(message.hash); + } + if (message.height !== 0) { + writer.uint32(40).int64(message.height); + } + if (message.time !== undefined) { + Timestamp.encode(toTimestamp(message.time), writer.uint32(50).fork()).ldelim(); + } + if (message.nextValidatorsHash.length !== 0) { + writer.uint32(58).bytes(message.nextValidatorsHash); + } + if (message.proposerAddress.length !== 0) { + writer.uint32(66).bytes(message.proposerAddress); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RequestProcessProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseRequestProcessProposal(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.txs.push(reader.bytes()); + break; + case 2: + message.proposedLastCommit = CommitInfo.decode(reader, reader.uint32()); + break; + case 3: + message.misbehavior.push(Misbehavior.decode(reader, reader.uint32())); + break; + case 4: + message.hash = reader.bytes(); + break; + case 5: + message.height = longToNumber(reader.int64() as Long); + break; + case 6: + message.time = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + case 7: + message.nextValidatorsHash = reader.bytes(); + break; + case 8: + message.proposerAddress = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): RequestProcessProposal { + return { + txs: Array.isArray(object?.txs) ? object.txs.map((e: any) => bytesFromBase64(e)) : [], + proposedLastCommit: isSet(object.proposedLastCommit) ? CommitInfo.fromJSON(object.proposedLastCommit) : undefined, + misbehavior: Array.isArray(object?.misbehavior) + ? object.misbehavior.map((e: any) => Misbehavior.fromJSON(e)) + : [], + hash: isSet(object.hash) ? bytesFromBase64(object.hash) : new Uint8Array(), + height: isSet(object.height) ? Number(object.height) : 0, + time: isSet(object.time) ? fromJsonTimestamp(object.time) : undefined, + nextValidatorsHash: isSet(object.nextValidatorsHash) + ? bytesFromBase64(object.nextValidatorsHash) + : new Uint8Array(), + proposerAddress: isSet(object.proposerAddress) ? bytesFromBase64(object.proposerAddress) : new Uint8Array(), + }; + }, + + toJSON(message: RequestProcessProposal): unknown { + const obj: any = {}; + if (message.txs) { + obj.txs = message.txs.map((e) => base64FromBytes(e !== undefined ? 
e : new Uint8Array())); + } else { + obj.txs = []; + } + message.proposedLastCommit !== undefined && (obj.proposedLastCommit = message.proposedLastCommit + ? CommitInfo.toJSON(message.proposedLastCommit) + : undefined); + if (message.misbehavior) { + obj.misbehavior = message.misbehavior.map((e) => e ? Misbehavior.toJSON(e) : undefined); + } else { + obj.misbehavior = []; + } + message.hash !== undefined + && (obj.hash = base64FromBytes(message.hash !== undefined ? message.hash : new Uint8Array())); + message.height !== undefined && (obj.height = Math.round(message.height)); + message.time !== undefined && (obj.time = message.time.toISOString()); + message.nextValidatorsHash !== undefined + && (obj.nextValidatorsHash = base64FromBytes( + message.nextValidatorsHash !== undefined ? message.nextValidatorsHash : new Uint8Array(), + )); + message.proposerAddress !== undefined + && (obj.proposerAddress = base64FromBytes( + message.proposerAddress !== undefined ? message.proposerAddress : new Uint8Array(), + )); + return obj; + }, + + fromPartial, I>>(object: I): RequestProcessProposal { + const message = createBaseRequestProcessProposal(); + message.txs = object.txs?.map((e) => e) || []; + message.proposedLastCommit = (object.proposedLastCommit !== undefined && object.proposedLastCommit !== null) + ? CommitInfo.fromPartial(object.proposedLastCommit) + : undefined; + message.misbehavior = object.misbehavior?.map((e) => Misbehavior.fromPartial(e)) || []; + message.hash = object.hash ?? new Uint8Array(); + message.height = object.height ?? 0; + message.time = object.time ?? undefined; + message.nextValidatorsHash = object.nextValidatorsHash ?? new Uint8Array(); + message.proposerAddress = object.proposerAddress ?? 
new Uint8Array(); + return message; + }, +}; + +function createBaseResponse(): Response { + return { + exception: undefined, + echo: undefined, + flush: undefined, + info: undefined, + initChain: undefined, + query: undefined, + beginBlock: undefined, + checkTx: undefined, + deliverTx: undefined, + endBlock: undefined, + commit: undefined, + listSnapshots: undefined, + offerSnapshot: undefined, + loadSnapshotChunk: undefined, + applySnapshotChunk: undefined, + prepareProposal: undefined, + processProposal: undefined, + }; +} + +export const Response = { + encode(message: Response, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.exception !== undefined) { + ResponseException.encode(message.exception, writer.uint32(10).fork()).ldelim(); + } + if (message.echo !== undefined) { + ResponseEcho.encode(message.echo, writer.uint32(18).fork()).ldelim(); + } + if (message.flush !== undefined) { + ResponseFlush.encode(message.flush, writer.uint32(26).fork()).ldelim(); + } + if (message.info !== undefined) { + ResponseInfo.encode(message.info, writer.uint32(34).fork()).ldelim(); + } + if (message.initChain !== undefined) { + ResponseInitChain.encode(message.initChain, writer.uint32(50).fork()).ldelim(); + } + if (message.query !== undefined) { + ResponseQuery.encode(message.query, writer.uint32(58).fork()).ldelim(); + } + if (message.beginBlock !== undefined) { + ResponseBeginBlock.encode(message.beginBlock, writer.uint32(66).fork()).ldelim(); + } + if (message.checkTx !== undefined) { + ResponseCheckTx.encode(message.checkTx, writer.uint32(74).fork()).ldelim(); + } + if (message.deliverTx !== undefined) { + ResponseDeliverTx.encode(message.deliverTx, writer.uint32(82).fork()).ldelim(); + } + if (message.endBlock !== undefined) { + ResponseEndBlock.encode(message.endBlock, writer.uint32(90).fork()).ldelim(); + } + if (message.commit !== undefined) { + ResponseCommit.encode(message.commit, writer.uint32(98).fork()).ldelim(); + } + if (message.listSnapshots 
!== undefined) { + ResponseListSnapshots.encode(message.listSnapshots, writer.uint32(106).fork()).ldelim(); + } + if (message.offerSnapshot !== undefined) { + ResponseOfferSnapshot.encode(message.offerSnapshot, writer.uint32(114).fork()).ldelim(); + } + if (message.loadSnapshotChunk !== undefined) { + ResponseLoadSnapshotChunk.encode(message.loadSnapshotChunk, writer.uint32(122).fork()).ldelim(); + } + if (message.applySnapshotChunk !== undefined) { + ResponseApplySnapshotChunk.encode(message.applySnapshotChunk, writer.uint32(130).fork()).ldelim(); + } + if (message.prepareProposal !== undefined) { + ResponsePrepareProposal.encode(message.prepareProposal, writer.uint32(138).fork()).ldelim(); + } + if (message.processProposal !== undefined) { + ResponseProcessProposal.encode(message.processProposal, writer.uint32(146).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Response { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.exception = ResponseException.decode(reader, reader.uint32()); + break; + case 2: + message.echo = ResponseEcho.decode(reader, reader.uint32()); + break; + case 3: + message.flush = ResponseFlush.decode(reader, reader.uint32()); + break; + case 4: + message.info = ResponseInfo.decode(reader, reader.uint32()); + break; + case 6: + message.initChain = ResponseInitChain.decode(reader, reader.uint32()); + break; + case 7: + message.query = ResponseQuery.decode(reader, reader.uint32()); + break; + case 8: + message.beginBlock = ResponseBeginBlock.decode(reader, reader.uint32()); + break; + case 9: + message.checkTx = ResponseCheckTx.decode(reader, reader.uint32()); + break; + case 10: + message.deliverTx = ResponseDeliverTx.decode(reader, reader.uint32()); + break; + case 11: + message.endBlock = ResponseEndBlock.decode(reader, reader.uint32()); + break; + case 12: + message.commit = ResponseCommit.decode(reader, reader.uint32()); + break; + case 13: + message.listSnapshots = ResponseListSnapshots.decode(reader, reader.uint32()); + break; + case 14: + message.offerSnapshot = ResponseOfferSnapshot.decode(reader, reader.uint32()); + break; + case 15: + message.loadSnapshotChunk = ResponseLoadSnapshotChunk.decode(reader, reader.uint32()); + break; + case 16: + message.applySnapshotChunk = ResponseApplySnapshotChunk.decode(reader, reader.uint32()); + break; + case 17: + message.prepareProposal = ResponsePrepareProposal.decode(reader, reader.uint32()); + break; + case 18: + message.processProposal = ResponseProcessProposal.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Response { + return { + exception: isSet(object.exception) ? 
ResponseException.fromJSON(object.exception) : undefined, + echo: isSet(object.echo) ? ResponseEcho.fromJSON(object.echo) : undefined, + flush: isSet(object.flush) ? ResponseFlush.fromJSON(object.flush) : undefined, + info: isSet(object.info) ? ResponseInfo.fromJSON(object.info) : undefined, + initChain: isSet(object.initChain) ? ResponseInitChain.fromJSON(object.initChain) : undefined, + query: isSet(object.query) ? ResponseQuery.fromJSON(object.query) : undefined, + beginBlock: isSet(object.beginBlock) ? ResponseBeginBlock.fromJSON(object.beginBlock) : undefined, + checkTx: isSet(object.checkTx) ? ResponseCheckTx.fromJSON(object.checkTx) : undefined, + deliverTx: isSet(object.deliverTx) ? ResponseDeliverTx.fromJSON(object.deliverTx) : undefined, + endBlock: isSet(object.endBlock) ? ResponseEndBlock.fromJSON(object.endBlock) : undefined, + commit: isSet(object.commit) ? ResponseCommit.fromJSON(object.commit) : undefined, + listSnapshots: isSet(object.listSnapshots) ? ResponseListSnapshots.fromJSON(object.listSnapshots) : undefined, + offerSnapshot: isSet(object.offerSnapshot) ? ResponseOfferSnapshot.fromJSON(object.offerSnapshot) : undefined, + loadSnapshotChunk: isSet(object.loadSnapshotChunk) + ? ResponseLoadSnapshotChunk.fromJSON(object.loadSnapshotChunk) + : undefined, + applySnapshotChunk: isSet(object.applySnapshotChunk) + ? ResponseApplySnapshotChunk.fromJSON(object.applySnapshotChunk) + : undefined, + prepareProposal: isSet(object.prepareProposal) + ? ResponsePrepareProposal.fromJSON(object.prepareProposal) + : undefined, + processProposal: isSet(object.processProposal) + ? ResponseProcessProposal.fromJSON(object.processProposal) + : undefined, + }; + }, + + toJSON(message: Response): unknown { + const obj: any = {}; + message.exception !== undefined + && (obj.exception = message.exception ? ResponseException.toJSON(message.exception) : undefined); + message.echo !== undefined && (obj.echo = message.echo ? 
ResponseEcho.toJSON(message.echo) : undefined); + message.flush !== undefined && (obj.flush = message.flush ? ResponseFlush.toJSON(message.flush) : undefined); + message.info !== undefined && (obj.info = message.info ? ResponseInfo.toJSON(message.info) : undefined); + message.initChain !== undefined + && (obj.initChain = message.initChain ? ResponseInitChain.toJSON(message.initChain) : undefined); + message.query !== undefined && (obj.query = message.query ? ResponseQuery.toJSON(message.query) : undefined); + message.beginBlock !== undefined + && (obj.beginBlock = message.beginBlock ? ResponseBeginBlock.toJSON(message.beginBlock) : undefined); + message.checkTx !== undefined + && (obj.checkTx = message.checkTx ? ResponseCheckTx.toJSON(message.checkTx) : undefined); + message.deliverTx !== undefined + && (obj.deliverTx = message.deliverTx ? ResponseDeliverTx.toJSON(message.deliverTx) : undefined); + message.endBlock !== undefined + && (obj.endBlock = message.endBlock ? ResponseEndBlock.toJSON(message.endBlock) : undefined); + message.commit !== undefined && (obj.commit = message.commit ? ResponseCommit.toJSON(message.commit) : undefined); + message.listSnapshots !== undefined + && (obj.listSnapshots = message.listSnapshots ? ResponseListSnapshots.toJSON(message.listSnapshots) : undefined); + message.offerSnapshot !== undefined + && (obj.offerSnapshot = message.offerSnapshot ? ResponseOfferSnapshot.toJSON(message.offerSnapshot) : undefined); + message.loadSnapshotChunk !== undefined && (obj.loadSnapshotChunk = message.loadSnapshotChunk + ? ResponseLoadSnapshotChunk.toJSON(message.loadSnapshotChunk) + : undefined); + message.applySnapshotChunk !== undefined && (obj.applySnapshotChunk = message.applySnapshotChunk + ? ResponseApplySnapshotChunk.toJSON(message.applySnapshotChunk) + : undefined); + message.prepareProposal !== undefined && (obj.prepareProposal = message.prepareProposal + ? 
ResponsePrepareProposal.toJSON(message.prepareProposal) + : undefined); + message.processProposal !== undefined && (obj.processProposal = message.processProposal + ? ResponseProcessProposal.toJSON(message.processProposal) + : undefined); + return obj; + }, + + fromPartial, I>>(object: I): Response { + const message = createBaseResponse(); + message.exception = (object.exception !== undefined && object.exception !== null) + ? ResponseException.fromPartial(object.exception) + : undefined; + message.echo = (object.echo !== undefined && object.echo !== null) + ? ResponseEcho.fromPartial(object.echo) + : undefined; + message.flush = (object.flush !== undefined && object.flush !== null) + ? ResponseFlush.fromPartial(object.flush) + : undefined; + message.info = (object.info !== undefined && object.info !== null) + ? ResponseInfo.fromPartial(object.info) + : undefined; + message.initChain = (object.initChain !== undefined && object.initChain !== null) + ? ResponseInitChain.fromPartial(object.initChain) + : undefined; + message.query = (object.query !== undefined && object.query !== null) + ? ResponseQuery.fromPartial(object.query) + : undefined; + message.beginBlock = (object.beginBlock !== undefined && object.beginBlock !== null) + ? ResponseBeginBlock.fromPartial(object.beginBlock) + : undefined; + message.checkTx = (object.checkTx !== undefined && object.checkTx !== null) + ? ResponseCheckTx.fromPartial(object.checkTx) + : undefined; + message.deliverTx = (object.deliverTx !== undefined && object.deliverTx !== null) + ? ResponseDeliverTx.fromPartial(object.deliverTx) + : undefined; + message.endBlock = (object.endBlock !== undefined && object.endBlock !== null) + ? ResponseEndBlock.fromPartial(object.endBlock) + : undefined; + message.commit = (object.commit !== undefined && object.commit !== null) + ? ResponseCommit.fromPartial(object.commit) + : undefined; + message.listSnapshots = (object.listSnapshots !== undefined && object.listSnapshots !== null) + ? 
ResponseListSnapshots.fromPartial(object.listSnapshots) + : undefined; + message.offerSnapshot = (object.offerSnapshot !== undefined && object.offerSnapshot !== null) + ? ResponseOfferSnapshot.fromPartial(object.offerSnapshot) + : undefined; + message.loadSnapshotChunk = (object.loadSnapshotChunk !== undefined && object.loadSnapshotChunk !== null) + ? ResponseLoadSnapshotChunk.fromPartial(object.loadSnapshotChunk) + : undefined; + message.applySnapshotChunk = (object.applySnapshotChunk !== undefined && object.applySnapshotChunk !== null) + ? ResponseApplySnapshotChunk.fromPartial(object.applySnapshotChunk) + : undefined; + message.prepareProposal = (object.prepareProposal !== undefined && object.prepareProposal !== null) + ? ResponsePrepareProposal.fromPartial(object.prepareProposal) + : undefined; + message.processProposal = (object.processProposal !== undefined && object.processProposal !== null) + ? ResponseProcessProposal.fromPartial(object.processProposal) + : undefined; + return message; + }, +}; + +function createBaseResponseException(): ResponseException { + return { error: "" }; +} + +export const ResponseException = { + encode(message: ResponseException, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.error !== "") { + writer.uint32(10).string(message.error); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseException { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponseException(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.error = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ResponseException { + return { error: isSet(object.error) ? 
String(object.error) : "" }; + }, + + toJSON(message: ResponseException): unknown { + const obj: any = {}; + message.error !== undefined && (obj.error = message.error); + return obj; + }, + + fromPartial, I>>(object: I): ResponseException { + const message = createBaseResponseException(); + message.error = object.error ?? ""; + return message; + }, +}; + +function createBaseResponseEcho(): ResponseEcho { + return { message: "" }; +} + +export const ResponseEcho = { + encode(message: ResponseEcho, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.message !== "") { + writer.uint32(10).string(message.message); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseEcho { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponseEcho(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.message = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ResponseEcho { + return { message: isSet(object.message) ? String(object.message) : "" }; + }, + + toJSON(message: ResponseEcho): unknown { + const obj: any = {}; + message.message !== undefined && (obj.message = message.message); + return obj; + }, + + fromPartial, I>>(object: I): ResponseEcho { + const message = createBaseResponseEcho(); + message.message = object.message ?? ""; + return message; + }, +}; + +function createBaseResponseFlush(): ResponseFlush { + return {}; +} + +export const ResponseFlush = { + encode(_: ResponseFlush, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseFlush { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseResponseFlush(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): ResponseFlush { + return {}; + }, + + toJSON(_: ResponseFlush): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): ResponseFlush { + const message = createBaseResponseFlush(); + return message; + }, +}; + +function createBaseResponseInfo(): ResponseInfo { + return { data: "", version: "", appVersion: 0, lastBlockHeight: 0, lastBlockAppHash: new Uint8Array() }; +} + +export const ResponseInfo = { + encode(message: ResponseInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.data !== "") { + writer.uint32(10).string(message.data); + } + if (message.version !== "") { + writer.uint32(18).string(message.version); + } + if (message.appVersion !== 0) { + writer.uint32(24).uint64(message.appVersion); + } + if (message.lastBlockHeight !== 0) { + writer.uint32(32).int64(message.lastBlockHeight); + } + if (message.lastBlockAppHash.length !== 0) { + writer.uint32(42).bytes(message.lastBlockAppHash); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseResponseInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.data = reader.string(); + break; + case 2: + message.version = reader.string(); + break; + case 3: + message.appVersion = longToNumber(reader.uint64() as Long); + break; + case 4: + message.lastBlockHeight = longToNumber(reader.int64() as Long); + break; + case 5: + message.lastBlockAppHash = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ResponseInfo { + return { + data: isSet(object.data) ? String(object.data) : "", + version: isSet(object.version) ? String(object.version) : "", + appVersion: isSet(object.appVersion) ? Number(object.appVersion) : 0, + lastBlockHeight: isSet(object.lastBlockHeight) ? Number(object.lastBlockHeight) : 0, + lastBlockAppHash: isSet(object.lastBlockAppHash) ? bytesFromBase64(object.lastBlockAppHash) : new Uint8Array(), + }; + }, + + toJSON(message: ResponseInfo): unknown { + const obj: any = {}; + message.data !== undefined && (obj.data = message.data); + message.version !== undefined && (obj.version = message.version); + message.appVersion !== undefined && (obj.appVersion = Math.round(message.appVersion)); + message.lastBlockHeight !== undefined && (obj.lastBlockHeight = Math.round(message.lastBlockHeight)); + message.lastBlockAppHash !== undefined + && (obj.lastBlockAppHash = base64FromBytes( + message.lastBlockAppHash !== undefined ? message.lastBlockAppHash : new Uint8Array(), + )); + return obj; + }, + + fromPartial, I>>(object: I): ResponseInfo { + const message = createBaseResponseInfo(); + message.data = object.data ?? ""; + message.version = object.version ?? ""; + message.appVersion = object.appVersion ?? 0; + message.lastBlockHeight = object.lastBlockHeight ?? 0; + message.lastBlockAppHash = object.lastBlockAppHash ?? 
new Uint8Array(); + return message; + }, +}; + +function createBaseResponseInitChain(): ResponseInitChain { + return { consensusParams: undefined, validators: [], appHash: new Uint8Array() }; +} + +export const ResponseInitChain = { + encode(message: ResponseInitChain, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.consensusParams !== undefined) { + ConsensusParams.encode(message.consensusParams, writer.uint32(10).fork()).ldelim(); + } + for (const v of message.validators) { + ValidatorUpdate.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.appHash.length !== 0) { + writer.uint32(26).bytes(message.appHash); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseInitChain { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponseInitChain(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.consensusParams = ConsensusParams.decode(reader, reader.uint32()); + break; + case 2: + message.validators.push(ValidatorUpdate.decode(reader, reader.uint32())); + break; + case 3: + message.appHash = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ResponseInitChain { + return { + consensusParams: isSet(object.consensusParams) ? ConsensusParams.fromJSON(object.consensusParams) : undefined, + validators: Array.isArray(object?.validators) + ? object.validators.map((e: any) => ValidatorUpdate.fromJSON(e)) + : [], + appHash: isSet(object.appHash) ? bytesFromBase64(object.appHash) : new Uint8Array(), + }; + }, + + toJSON(message: ResponseInitChain): unknown { + const obj: any = {}; + message.consensusParams !== undefined + && (obj.consensusParams = message.consensusParams ? 
ConsensusParams.toJSON(message.consensusParams) : undefined); + if (message.validators) { + obj.validators = message.validators.map((e) => e ? ValidatorUpdate.toJSON(e) : undefined); + } else { + obj.validators = []; + } + message.appHash !== undefined + && (obj.appHash = base64FromBytes(message.appHash !== undefined ? message.appHash : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): ResponseInitChain { + const message = createBaseResponseInitChain(); + message.consensusParams = (object.consensusParams !== undefined && object.consensusParams !== null) + ? ConsensusParams.fromPartial(object.consensusParams) + : undefined; + message.validators = object.validators?.map((e) => ValidatorUpdate.fromPartial(e)) || []; + message.appHash = object.appHash ?? new Uint8Array(); + return message; + }, +}; + +function createBaseResponseQuery(): ResponseQuery { + return { + code: 0, + log: "", + info: "", + index: 0, + key: new Uint8Array(), + value: new Uint8Array(), + proofOps: undefined, + height: 0, + codespace: "", + }; +} + +export const ResponseQuery = { + encode(message: ResponseQuery, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.code !== 0) { + writer.uint32(8).uint32(message.code); + } + if (message.log !== "") { + writer.uint32(26).string(message.log); + } + if (message.info !== "") { + writer.uint32(34).string(message.info); + } + if (message.index !== 0) { + writer.uint32(40).int64(message.index); + } + if (message.key.length !== 0) { + writer.uint32(50).bytes(message.key); + } + if (message.value.length !== 0) { + writer.uint32(58).bytes(message.value); + } + if (message.proofOps !== undefined) { + ProofOps.encode(message.proofOps, writer.uint32(66).fork()).ldelim(); + } + if (message.height !== 0) { + writer.uint32(72).int64(message.height); + } + if (message.codespace !== "") { + writer.uint32(82).string(message.codespace); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): 
ResponseQuery { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponseQuery(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.code = reader.uint32(); + break; + case 3: + message.log = reader.string(); + break; + case 4: + message.info = reader.string(); + break; + case 5: + message.index = longToNumber(reader.int64() as Long); + break; + case 6: + message.key = reader.bytes(); + break; + case 7: + message.value = reader.bytes(); + break; + case 8: + message.proofOps = ProofOps.decode(reader, reader.uint32()); + break; + case 9: + message.height = longToNumber(reader.int64() as Long); + break; + case 10: + message.codespace = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ResponseQuery { + return { + code: isSet(object.code) ? Number(object.code) : 0, + log: isSet(object.log) ? String(object.log) : "", + info: isSet(object.info) ? String(object.info) : "", + index: isSet(object.index) ? Number(object.index) : 0, + key: isSet(object.key) ? bytesFromBase64(object.key) : new Uint8Array(), + value: isSet(object.value) ? bytesFromBase64(object.value) : new Uint8Array(), + proofOps: isSet(object.proofOps) ? ProofOps.fromJSON(object.proofOps) : undefined, + height: isSet(object.height) ? Number(object.height) : 0, + codespace: isSet(object.codespace) ? String(object.codespace) : "", + }; + }, + + toJSON(message: ResponseQuery): unknown { + const obj: any = {}; + message.code !== undefined && (obj.code = Math.round(message.code)); + message.log !== undefined && (obj.log = message.log); + message.info !== undefined && (obj.info = message.info); + message.index !== undefined && (obj.index = Math.round(message.index)); + message.key !== undefined + && (obj.key = base64FromBytes(message.key !== undefined ? 
message.key : new Uint8Array())); + message.value !== undefined + && (obj.value = base64FromBytes(message.value !== undefined ? message.value : new Uint8Array())); + message.proofOps !== undefined && (obj.proofOps = message.proofOps ? ProofOps.toJSON(message.proofOps) : undefined); + message.height !== undefined && (obj.height = Math.round(message.height)); + message.codespace !== undefined && (obj.codespace = message.codespace); + return obj; + }, + + fromPartial, I>>(object: I): ResponseQuery { + const message = createBaseResponseQuery(); + message.code = object.code ?? 0; + message.log = object.log ?? ""; + message.info = object.info ?? ""; + message.index = object.index ?? 0; + message.key = object.key ?? new Uint8Array(); + message.value = object.value ?? new Uint8Array(); + message.proofOps = (object.proofOps !== undefined && object.proofOps !== null) + ? ProofOps.fromPartial(object.proofOps) + : undefined; + message.height = object.height ?? 0; + message.codespace = object.codespace ?? ""; + return message; + }, +}; + +function createBaseResponseBeginBlock(): ResponseBeginBlock { + return { events: [] }; +} + +export const ResponseBeginBlock = { + encode(message: ResponseBeginBlock, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.events) { + Event.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseBeginBlock { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseResponseBeginBlock(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.events.push(Event.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ResponseBeginBlock { + return { events: Array.isArray(object?.events) ? object.events.map((e: any) => Event.fromJSON(e)) : [] }; + }, + + toJSON(message: ResponseBeginBlock): unknown { + const obj: any = {}; + if (message.events) { + obj.events = message.events.map((e) => e ? Event.toJSON(e) : undefined); + } else { + obj.events = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ResponseBeginBlock { + const message = createBaseResponseBeginBlock(); + message.events = object.events?.map((e) => Event.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseResponseCheckTx(): ResponseCheckTx { + return { + code: 0, + data: new Uint8Array(), + log: "", + info: "", + gasWanted: 0, + gasUsed: 0, + events: [], + codespace: "", + sender: "", + priority: 0, + mempoolError: "", + }; +} + +export const ResponseCheckTx = { + encode(message: ResponseCheckTx, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.code !== 0) { + writer.uint32(8).uint32(message.code); + } + if (message.data.length !== 0) { + writer.uint32(18).bytes(message.data); + } + if (message.log !== "") { + writer.uint32(26).string(message.log); + } + if (message.info !== "") { + writer.uint32(34).string(message.info); + } + if (message.gasWanted !== 0) { + writer.uint32(40).int64(message.gasWanted); + } + if (message.gasUsed !== 0) { + writer.uint32(48).int64(message.gasUsed); + } + for (const v of message.events) { + Event.encode(v!, writer.uint32(58).fork()).ldelim(); + } + if (message.codespace !== "") { + writer.uint32(66).string(message.codespace); + } + if (message.sender !== "") { + 
writer.uint32(74).string(message.sender); + } + if (message.priority !== 0) { + writer.uint32(80).int64(message.priority); + } + if (message.mempoolError !== "") { + writer.uint32(90).string(message.mempoolError); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseCheckTx { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponseCheckTx(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.code = reader.uint32(); + break; + case 2: + message.data = reader.bytes(); + break; + case 3: + message.log = reader.string(); + break; + case 4: + message.info = reader.string(); + break; + case 5: + message.gasWanted = longToNumber(reader.int64() as Long); + break; + case 6: + message.gasUsed = longToNumber(reader.int64() as Long); + break; + case 7: + message.events.push(Event.decode(reader, reader.uint32())); + break; + case 8: + message.codespace = reader.string(); + break; + case 9: + message.sender = reader.string(); + break; + case 10: + message.priority = longToNumber(reader.int64() as Long); + break; + case 11: + message.mempoolError = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ResponseCheckTx { + return { + code: isSet(object.code) ? Number(object.code) : 0, + data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(), + log: isSet(object.log) ? String(object.log) : "", + info: isSet(object.info) ? String(object.info) : "", + gasWanted: isSet(object.gas_wanted) ? Number(object.gas_wanted) : 0, + gasUsed: isSet(object.gas_used) ? Number(object.gas_used) : 0, + events: Array.isArray(object?.events) ? object.events.map((e: any) => Event.fromJSON(e)) : [], + codespace: isSet(object.codespace) ? 
String(object.codespace) : "", + sender: isSet(object.sender) ? String(object.sender) : "", + priority: isSet(object.priority) ? Number(object.priority) : 0, + mempoolError: isSet(object.mempoolError) ? String(object.mempoolError) : "", + }; + }, + + toJSON(message: ResponseCheckTx): unknown { + const obj: any = {}; + message.code !== undefined && (obj.code = Math.round(message.code)); + message.data !== undefined + && (obj.data = base64FromBytes(message.data !== undefined ? message.data : new Uint8Array())); + message.log !== undefined && (obj.log = message.log); + message.info !== undefined && (obj.info = message.info); + message.gasWanted !== undefined && (obj.gas_wanted = Math.round(message.gasWanted)); + message.gasUsed !== undefined && (obj.gas_used = Math.round(message.gasUsed)); + if (message.events) { + obj.events = message.events.map((e) => e ? Event.toJSON(e) : undefined); + } else { + obj.events = []; + } + message.codespace !== undefined && (obj.codespace = message.codespace); + message.sender !== undefined && (obj.sender = message.sender); + message.priority !== undefined && (obj.priority = Math.round(message.priority)); + message.mempoolError !== undefined && (obj.mempoolError = message.mempoolError); + return obj; + }, + + fromPartial, I>>(object: I): ResponseCheckTx { + const message = createBaseResponseCheckTx(); + message.code = object.code ?? 0; + message.data = object.data ?? new Uint8Array(); + message.log = object.log ?? ""; + message.info = object.info ?? ""; + message.gasWanted = object.gasWanted ?? 0; + message.gasUsed = object.gasUsed ?? 0; + message.events = object.events?.map((e) => Event.fromPartial(e)) || []; + message.codespace = object.codespace ?? ""; + message.sender = object.sender ?? ""; + message.priority = object.priority ?? 0; + message.mempoolError = object.mempoolError ?? 
""; + return message; + }, +}; + +function createBaseResponseDeliverTx(): ResponseDeliverTx { + return { code: 0, data: new Uint8Array(), log: "", info: "", gasWanted: 0, gasUsed: 0, events: [], codespace: "" }; +} + +export const ResponseDeliverTx = { + encode(message: ResponseDeliverTx, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.code !== 0) { + writer.uint32(8).uint32(message.code); + } + if (message.data.length !== 0) { + writer.uint32(18).bytes(message.data); + } + if (message.log !== "") { + writer.uint32(26).string(message.log); + } + if (message.info !== "") { + writer.uint32(34).string(message.info); + } + if (message.gasWanted !== 0) { + writer.uint32(40).int64(message.gasWanted); + } + if (message.gasUsed !== 0) { + writer.uint32(48).int64(message.gasUsed); + } + for (const v of message.events) { + Event.encode(v!, writer.uint32(58).fork()).ldelim(); + } + if (message.codespace !== "") { + writer.uint32(66).string(message.codespace); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseDeliverTx { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseResponseDeliverTx(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.code = reader.uint32(); + break; + case 2: + message.data = reader.bytes(); + break; + case 3: + message.log = reader.string(); + break; + case 4: + message.info = reader.string(); + break; + case 5: + message.gasWanted = longToNumber(reader.int64() as Long); + break; + case 6: + message.gasUsed = longToNumber(reader.int64() as Long); + break; + case 7: + message.events.push(Event.decode(reader, reader.uint32())); + break; + case 8: + message.codespace = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ResponseDeliverTx { + return { + code: isSet(object.code) ? Number(object.code) : 0, + data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(), + log: isSet(object.log) ? String(object.log) : "", + info: isSet(object.info) ? String(object.info) : "", + gasWanted: isSet(object.gas_wanted) ? Number(object.gas_wanted) : 0, + gasUsed: isSet(object.gas_used) ? Number(object.gas_used) : 0, + events: Array.isArray(object?.events) ? object.events.map((e: any) => Event.fromJSON(e)) : [], + codespace: isSet(object.codespace) ? String(object.codespace) : "", + }; + }, + + toJSON(message: ResponseDeliverTx): unknown { + const obj: any = {}; + message.code !== undefined && (obj.code = Math.round(message.code)); + message.data !== undefined + && (obj.data = base64FromBytes(message.data !== undefined ? message.data : new Uint8Array())); + message.log !== undefined && (obj.log = message.log); + message.info !== undefined && (obj.info = message.info); + message.gasWanted !== undefined && (obj.gas_wanted = Math.round(message.gasWanted)); + message.gasUsed !== undefined && (obj.gas_used = Math.round(message.gasUsed)); + if (message.events) { + obj.events = message.events.map((e) => e ? 
Event.toJSON(e) : undefined); + } else { + obj.events = []; + } + message.codespace !== undefined && (obj.codespace = message.codespace); + return obj; + }, + + fromPartial, I>>(object: I): ResponseDeliverTx { + const message = createBaseResponseDeliverTx(); + message.code = object.code ?? 0; + message.data = object.data ?? new Uint8Array(); + message.log = object.log ?? ""; + message.info = object.info ?? ""; + message.gasWanted = object.gasWanted ?? 0; + message.gasUsed = object.gasUsed ?? 0; + message.events = object.events?.map((e) => Event.fromPartial(e)) || []; + message.codespace = object.codespace ?? ""; + return message; + }, +}; + +function createBaseResponseEndBlock(): ResponseEndBlock { + return { validatorUpdates: [], consensusParamUpdates: undefined, events: [] }; +} + +export const ResponseEndBlock = { + encode(message: ResponseEndBlock, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.validatorUpdates) { + ValidatorUpdate.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.consensusParamUpdates !== undefined) { + ConsensusParams.encode(message.consensusParamUpdates, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.events) { + Event.encode(v!, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseEndBlock { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseResponseEndBlock(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.validatorUpdates.push(ValidatorUpdate.decode(reader, reader.uint32())); + break; + case 2: + message.consensusParamUpdates = ConsensusParams.decode(reader, reader.uint32()); + break; + case 3: + message.events.push(Event.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ResponseEndBlock { + return { + validatorUpdates: Array.isArray(object?.validatorUpdates) + ? object.validatorUpdates.map((e: any) => ValidatorUpdate.fromJSON(e)) + : [], + consensusParamUpdates: isSet(object.consensusParamUpdates) + ? ConsensusParams.fromJSON(object.consensusParamUpdates) + : undefined, + events: Array.isArray(object?.events) ? object.events.map((e: any) => Event.fromJSON(e)) : [], + }; + }, + + toJSON(message: ResponseEndBlock): unknown { + const obj: any = {}; + if (message.validatorUpdates) { + obj.validatorUpdates = message.validatorUpdates.map((e) => e ? ValidatorUpdate.toJSON(e) : undefined); + } else { + obj.validatorUpdates = []; + } + message.consensusParamUpdates !== undefined && (obj.consensusParamUpdates = message.consensusParamUpdates + ? ConsensusParams.toJSON(message.consensusParamUpdates) + : undefined); + if (message.events) { + obj.events = message.events.map((e) => e ? Event.toJSON(e) : undefined); + } else { + obj.events = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ResponseEndBlock { + const message = createBaseResponseEndBlock(); + message.validatorUpdates = object.validatorUpdates?.map((e) => ValidatorUpdate.fromPartial(e)) || []; + message.consensusParamUpdates = + (object.consensusParamUpdates !== undefined && object.consensusParamUpdates !== null) + ? 
ConsensusParams.fromPartial(object.consensusParamUpdates) + : undefined; + message.events = object.events?.map((e) => Event.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseResponseCommit(): ResponseCommit { + return { data: new Uint8Array(), retainHeight: 0 }; +} + +export const ResponseCommit = { + encode(message: ResponseCommit, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.data.length !== 0) { + writer.uint32(18).bytes(message.data); + } + if (message.retainHeight !== 0) { + writer.uint32(24).int64(message.retainHeight); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseCommit { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponseCommit(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.data = reader.bytes(); + break; + case 3: + message.retainHeight = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ResponseCommit { + return { + data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(), + retainHeight: isSet(object.retainHeight) ? Number(object.retainHeight) : 0, + }; + }, + + toJSON(message: ResponseCommit): unknown { + const obj: any = {}; + message.data !== undefined + && (obj.data = base64FromBytes(message.data !== undefined ? message.data : new Uint8Array())); + message.retainHeight !== undefined && (obj.retainHeight = Math.round(message.retainHeight)); + return obj; + }, + + fromPartial, I>>(object: I): ResponseCommit { + const message = createBaseResponseCommit(); + message.data = object.data ?? new Uint8Array(); + message.retainHeight = object.retainHeight ?? 
0; + return message; + }, +}; + +function createBaseResponseListSnapshots(): ResponseListSnapshots { + return { snapshots: [] }; +} + +export const ResponseListSnapshots = { + encode(message: ResponseListSnapshots, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.snapshots) { + Snapshot.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseListSnapshots { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponseListSnapshots(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.snapshots.push(Snapshot.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ResponseListSnapshots { + return { + snapshots: Array.isArray(object?.snapshots) ? object.snapshots.map((e: any) => Snapshot.fromJSON(e)) : [], + }; + }, + + toJSON(message: ResponseListSnapshots): unknown { + const obj: any = {}; + if (message.snapshots) { + obj.snapshots = message.snapshots.map((e) => e ? 
Snapshot.toJSON(e) : undefined); + } else { + obj.snapshots = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ResponseListSnapshots { + const message = createBaseResponseListSnapshots(); + message.snapshots = object.snapshots?.map((e) => Snapshot.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseResponseOfferSnapshot(): ResponseOfferSnapshot { + return { result: 0 }; +} + +export const ResponseOfferSnapshot = { + encode(message: ResponseOfferSnapshot, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.result !== 0) { + writer.uint32(8).int32(message.result); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseOfferSnapshot { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponseOfferSnapshot(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.result = reader.int32() as any; + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ResponseOfferSnapshot { + return { result: isSet(object.result) ? responseOfferSnapshot_ResultFromJSON(object.result) : 0 }; + }, + + toJSON(message: ResponseOfferSnapshot): unknown { + const obj: any = {}; + message.result !== undefined && (obj.result = responseOfferSnapshot_ResultToJSON(message.result)); + return obj; + }, + + fromPartial, I>>(object: I): ResponseOfferSnapshot { + const message = createBaseResponseOfferSnapshot(); + message.result = object.result ?? 
0; + return message; + }, +}; + +function createBaseResponseLoadSnapshotChunk(): ResponseLoadSnapshotChunk { + return { chunk: new Uint8Array() }; +} + +export const ResponseLoadSnapshotChunk = { + encode(message: ResponseLoadSnapshotChunk, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.chunk.length !== 0) { + writer.uint32(10).bytes(message.chunk); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseLoadSnapshotChunk { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponseLoadSnapshotChunk(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.chunk = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ResponseLoadSnapshotChunk { + return { chunk: isSet(object.chunk) ? bytesFromBase64(object.chunk) : new Uint8Array() }; + }, + + toJSON(message: ResponseLoadSnapshotChunk): unknown { + const obj: any = {}; + message.chunk !== undefined + && (obj.chunk = base64FromBytes(message.chunk !== undefined ? message.chunk : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): ResponseLoadSnapshotChunk { + const message = createBaseResponseLoadSnapshotChunk(); + message.chunk = object.chunk ?? 
new Uint8Array(); + return message; + }, +}; + +function createBaseResponseApplySnapshotChunk(): ResponseApplySnapshotChunk { + return { result: 0, refetchChunks: [], rejectSenders: [] }; +} + +export const ResponseApplySnapshotChunk = { + encode(message: ResponseApplySnapshotChunk, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.result !== 0) { + writer.uint32(8).int32(message.result); + } + writer.uint32(18).fork(); + for (const v of message.refetchChunks) { + writer.uint32(v); + } + writer.ldelim(); + for (const v of message.rejectSenders) { + writer.uint32(26).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseApplySnapshotChunk { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponseApplySnapshotChunk(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.result = reader.int32() as any; + break; + case 2: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.refetchChunks.push(reader.uint32()); + } + } else { + message.refetchChunks.push(reader.uint32()); + } + break; + case 3: + message.rejectSenders.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ResponseApplySnapshotChunk { + return { + result: isSet(object.result) ? responseApplySnapshotChunk_ResultFromJSON(object.result) : 0, + refetchChunks: Array.isArray(object?.refetchChunks) ? object.refetchChunks.map((e: any) => Number(e)) : [], + rejectSenders: Array.isArray(object?.rejectSenders) ? 
object.rejectSenders.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: ResponseApplySnapshotChunk): unknown { + const obj: any = {}; + message.result !== undefined && (obj.result = responseApplySnapshotChunk_ResultToJSON(message.result)); + if (message.refetchChunks) { + obj.refetchChunks = message.refetchChunks.map((e) => Math.round(e)); + } else { + obj.refetchChunks = []; + } + if (message.rejectSenders) { + obj.rejectSenders = message.rejectSenders.map((e) => e); + } else { + obj.rejectSenders = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ResponseApplySnapshotChunk { + const message = createBaseResponseApplySnapshotChunk(); + message.result = object.result ?? 0; + message.refetchChunks = object.refetchChunks?.map((e) => e) || []; + message.rejectSenders = object.rejectSenders?.map((e) => e) || []; + return message; + }, +}; + +function createBaseResponsePrepareProposal(): ResponsePrepareProposal { + return { txs: [] }; +} + +export const ResponsePrepareProposal = { + encode(message: ResponsePrepareProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.txs) { + writer.uint32(10).bytes(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponsePrepareProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponsePrepareProposal(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.txs.push(reader.bytes()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ResponsePrepareProposal { + return { txs: Array.isArray(object?.txs) ? 
object.txs.map((e: any) => bytesFromBase64(e)) : [] }; + }, + + toJSON(message: ResponsePrepareProposal): unknown { + const obj: any = {}; + if (message.txs) { + obj.txs = message.txs.map((e) => base64FromBytes(e !== undefined ? e : new Uint8Array())); + } else { + obj.txs = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ResponsePrepareProposal { + const message = createBaseResponsePrepareProposal(); + message.txs = object.txs?.map((e) => e) || []; + return message; + }, +}; + +function createBaseResponseProcessProposal(): ResponseProcessProposal { + return { status: 0 }; +} + +export const ResponseProcessProposal = { + encode(message: ResponseProcessProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.status !== 0) { + writer.uint32(8).int32(message.status); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResponseProcessProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResponseProcessProposal(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.status = reader.int32() as any; + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ResponseProcessProposal { + return { status: isSet(object.status) ? responseProcessProposal_ProposalStatusFromJSON(object.status) : 0 }; + }, + + toJSON(message: ResponseProcessProposal): unknown { + const obj: any = {}; + message.status !== undefined && (obj.status = responseProcessProposal_ProposalStatusToJSON(message.status)); + return obj; + }, + + fromPartial, I>>(object: I): ResponseProcessProposal { + const message = createBaseResponseProcessProposal(); + message.status = object.status ?? 
0; + return message; + }, +}; + +function createBaseCommitInfo(): CommitInfo { + return { round: 0, votes: [] }; +} + +export const CommitInfo = { + encode(message: CommitInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.round !== 0) { + writer.uint32(8).int32(message.round); + } + for (const v of message.votes) { + VoteInfo.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CommitInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCommitInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.round = reader.int32(); + break; + case 2: + message.votes.push(VoteInfo.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): CommitInfo { + return { + round: isSet(object.round) ? Number(object.round) : 0, + votes: Array.isArray(object?.votes) ? object.votes.map((e: any) => VoteInfo.fromJSON(e)) : [], + }; + }, + + toJSON(message: CommitInfo): unknown { + const obj: any = {}; + message.round !== undefined && (obj.round = Math.round(message.round)); + if (message.votes) { + obj.votes = message.votes.map((e) => e ? VoteInfo.toJSON(e) : undefined); + } else { + obj.votes = []; + } + return obj; + }, + + fromPartial, I>>(object: I): CommitInfo { + const message = createBaseCommitInfo(); + message.round = object.round ?? 
0; + message.votes = object.votes?.map((e) => VoteInfo.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseExtendedCommitInfo(): ExtendedCommitInfo { + return { round: 0, votes: [] }; +} + +export const ExtendedCommitInfo = { + encode(message: ExtendedCommitInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.round !== 0) { + writer.uint32(8).int32(message.round); + } + for (const v of message.votes) { + ExtendedVoteInfo.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ExtendedCommitInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExtendedCommitInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.round = reader.int32(); + break; + case 2: + message.votes.push(ExtendedVoteInfo.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ExtendedCommitInfo { + return { + round: isSet(object.round) ? Number(object.round) : 0, + votes: Array.isArray(object?.votes) ? object.votes.map((e: any) => ExtendedVoteInfo.fromJSON(e)) : [], + }; + }, + + toJSON(message: ExtendedCommitInfo): unknown { + const obj: any = {}; + message.round !== undefined && (obj.round = Math.round(message.round)); + if (message.votes) { + obj.votes = message.votes.map((e) => e ? ExtendedVoteInfo.toJSON(e) : undefined); + } else { + obj.votes = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ExtendedCommitInfo { + const message = createBaseExtendedCommitInfo(); + message.round = object.round ?? 
0; + message.votes = object.votes?.map((e) => ExtendedVoteInfo.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEvent(): Event { + return { type: "", attributes: [] }; +} + +export const Event = { + encode(message: Event, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.type !== "") { + writer.uint32(10).string(message.type); + } + for (const v of message.attributes) { + EventAttribute.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Event { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEvent(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.type = reader.string(); + break; + case 2: + message.attributes.push(EventAttribute.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Event { + return { + type: isSet(object.type) ? String(object.type) : "", + attributes: Array.isArray(object?.attributes) + ? object.attributes.map((e: any) => EventAttribute.fromJSON(e)) + : [], + }; + }, + + toJSON(message: Event): unknown { + const obj: any = {}; + message.type !== undefined && (obj.type = message.type); + if (message.attributes) { + obj.attributes = message.attributes.map((e) => e ? EventAttribute.toJSON(e) : undefined); + } else { + obj.attributes = []; + } + return obj; + }, + + fromPartial, I>>(object: I): Event { + const message = createBaseEvent(); + message.type = object.type ?? 
""; + message.attributes = object.attributes?.map((e) => EventAttribute.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEventAttribute(): EventAttribute { + return { key: "", value: "", index: false }; +} + +export const EventAttribute = { + encode(message: EventAttribute, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== "") { + writer.uint32(18).string(message.value); + } + if (message.index === true) { + writer.uint32(24).bool(message.index); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EventAttribute { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEventAttribute(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.key = reader.string(); + break; + case 2: + message.value = reader.string(); + break; + case 3: + message.index = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EventAttribute { + return { + key: isSet(object.key) ? String(object.key) : "", + value: isSet(object.value) ? String(object.value) : "", + index: isSet(object.index) ? Boolean(object.index) : false, + }; + }, + + toJSON(message: EventAttribute): unknown { + const obj: any = {}; + message.key !== undefined && (obj.key = message.key); + message.value !== undefined && (obj.value = message.value); + message.index !== undefined && (obj.index = message.index); + return obj; + }, + + fromPartial, I>>(object: I): EventAttribute { + const message = createBaseEventAttribute(); + message.key = object.key ?? ""; + message.value = object.value ?? ""; + message.index = object.index ?? 
false; + return message; + }, +}; + +function createBaseTxResult(): TxResult { + return { height: 0, index: 0, tx: new Uint8Array(), result: undefined }; +} + +export const TxResult = { + encode(message: TxResult, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.height !== 0) { + writer.uint32(8).int64(message.height); + } + if (message.index !== 0) { + writer.uint32(16).uint32(message.index); + } + if (message.tx.length !== 0) { + writer.uint32(26).bytes(message.tx); + } + if (message.result !== undefined) { + ResponseDeliverTx.encode(message.result, writer.uint32(34).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TxResult { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTxResult(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.height = longToNumber(reader.int64() as Long); + break; + case 2: + message.index = reader.uint32(); + break; + case 3: + message.tx = reader.bytes(); + break; + case 4: + message.result = ResponseDeliverTx.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): TxResult { + return { + height: isSet(object.height) ? Number(object.height) : 0, + index: isSet(object.index) ? Number(object.index) : 0, + tx: isSet(object.tx) ? bytesFromBase64(object.tx) : new Uint8Array(), + result: isSet(object.result) ? ResponseDeliverTx.fromJSON(object.result) : undefined, + }; + }, + + toJSON(message: TxResult): unknown { + const obj: any = {}; + message.height !== undefined && (obj.height = Math.round(message.height)); + message.index !== undefined && (obj.index = Math.round(message.index)); + message.tx !== undefined && (obj.tx = base64FromBytes(message.tx !== undefined ? 
message.tx : new Uint8Array())); + message.result !== undefined + && (obj.result = message.result ? ResponseDeliverTx.toJSON(message.result) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): TxResult { + const message = createBaseTxResult(); + message.height = object.height ?? 0; + message.index = object.index ?? 0; + message.tx = object.tx ?? new Uint8Array(); + message.result = (object.result !== undefined && object.result !== null) + ? ResponseDeliverTx.fromPartial(object.result) + : undefined; + return message; + }, +}; + +function createBaseValidator(): Validator { + return { address: new Uint8Array(), power: 0 }; +} + +export const Validator = { + encode(message: Validator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address.length !== 0) { + writer.uint32(10).bytes(message.address); + } + if (message.power !== 0) { + writer.uint32(24).int64(message.power); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Validator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidator(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.bytes(); + break; + case 3: + message.power = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Validator { + return { + address: isSet(object.address) ? bytesFromBase64(object.address) : new Uint8Array(), + power: isSet(object.power) ? Number(object.power) : 0, + }; + }, + + toJSON(message: Validator): unknown { + const obj: any = {}; + message.address !== undefined + && (obj.address = base64FromBytes(message.address !== undefined ? 
message.address : new Uint8Array())); + message.power !== undefined && (obj.power = Math.round(message.power)); + return obj; + }, + + fromPartial, I>>(object: I): Validator { + const message = createBaseValidator(); + message.address = object.address ?? new Uint8Array(); + message.power = object.power ?? 0; + return message; + }, +}; + +function createBaseValidatorUpdate(): ValidatorUpdate { + return { pubKey: undefined, power: 0 }; +} + +export const ValidatorUpdate = { + encode(message: ValidatorUpdate, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pubKey !== undefined) { + PublicKey.encode(message.pubKey, writer.uint32(10).fork()).ldelim(); + } + if (message.power !== 0) { + writer.uint32(16).int64(message.power); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorUpdate { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidatorUpdate(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pubKey = PublicKey.decode(reader, reader.uint32()); + break; + case 2: + message.power = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ValidatorUpdate { + return { + pubKey: isSet(object.pubKey) ? PublicKey.fromJSON(object.pubKey) : undefined, + power: isSet(object.power) ? Number(object.power) : 0, + }; + }, + + toJSON(message: ValidatorUpdate): unknown { + const obj: any = {}; + message.pubKey !== undefined && (obj.pubKey = message.pubKey ? 
PublicKey.toJSON(message.pubKey) : undefined); + message.power !== undefined && (obj.power = Math.round(message.power)); + return obj; + }, + + fromPartial, I>>(object: I): ValidatorUpdate { + const message = createBaseValidatorUpdate(); + message.pubKey = (object.pubKey !== undefined && object.pubKey !== null) + ? PublicKey.fromPartial(object.pubKey) + : undefined; + message.power = object.power ?? 0; + return message; + }, +}; + +function createBaseVoteInfo(): VoteInfo { + return { validator: undefined, signedLastBlock: false }; +} + +export const VoteInfo = { + encode(message: VoteInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.validator !== undefined) { + Validator.encode(message.validator, writer.uint32(10).fork()).ldelim(); + } + if (message.signedLastBlock === true) { + writer.uint32(16).bool(message.signedLastBlock); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): VoteInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseVoteInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.validator = Validator.decode(reader, reader.uint32()); + break; + case 2: + message.signedLastBlock = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): VoteInfo { + return { + validator: isSet(object.validator) ? Validator.fromJSON(object.validator) : undefined, + signedLastBlock: isSet(object.signedLastBlock) ? Boolean(object.signedLastBlock) : false, + }; + }, + + toJSON(message: VoteInfo): unknown { + const obj: any = {}; + message.validator !== undefined + && (obj.validator = message.validator ? 
Validator.toJSON(message.validator) : undefined); + message.signedLastBlock !== undefined && (obj.signedLastBlock = message.signedLastBlock); + return obj; + }, + + fromPartial, I>>(object: I): VoteInfo { + const message = createBaseVoteInfo(); + message.validator = (object.validator !== undefined && object.validator !== null) + ? Validator.fromPartial(object.validator) + : undefined; + message.signedLastBlock = object.signedLastBlock ?? false; + return message; + }, +}; + +function createBaseExtendedVoteInfo(): ExtendedVoteInfo { + return { validator: undefined, signedLastBlock: false, voteExtension: new Uint8Array() }; +} + +export const ExtendedVoteInfo = { + encode(message: ExtendedVoteInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.validator !== undefined) { + Validator.encode(message.validator, writer.uint32(10).fork()).ldelim(); + } + if (message.signedLastBlock === true) { + writer.uint32(16).bool(message.signedLastBlock); + } + if (message.voteExtension.length !== 0) { + writer.uint32(26).bytes(message.voteExtension); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ExtendedVoteInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExtendedVoteInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.validator = Validator.decode(reader, reader.uint32()); + break; + case 2: + message.signedLastBlock = reader.bool(); + break; + case 3: + message.voteExtension = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ExtendedVoteInfo { + return { + validator: isSet(object.validator) ? Validator.fromJSON(object.validator) : undefined, + signedLastBlock: isSet(object.signedLastBlock) ? 
Boolean(object.signedLastBlock) : false, + voteExtension: isSet(object.voteExtension) ? bytesFromBase64(object.voteExtension) : new Uint8Array(), + }; + }, + + toJSON(message: ExtendedVoteInfo): unknown { + const obj: any = {}; + message.validator !== undefined + && (obj.validator = message.validator ? Validator.toJSON(message.validator) : undefined); + message.signedLastBlock !== undefined && (obj.signedLastBlock = message.signedLastBlock); + message.voteExtension !== undefined + && (obj.voteExtension = base64FromBytes( + message.voteExtension !== undefined ? message.voteExtension : new Uint8Array(), + )); + return obj; + }, + + fromPartial, I>>(object: I): ExtendedVoteInfo { + const message = createBaseExtendedVoteInfo(); + message.validator = (object.validator !== undefined && object.validator !== null) + ? Validator.fromPartial(object.validator) + : undefined; + message.signedLastBlock = object.signedLastBlock ?? false; + message.voteExtension = object.voteExtension ?? new Uint8Array(); + return message; + }, +}; + +function createBaseMisbehavior(): Misbehavior { + return { type: 0, validator: undefined, height: 0, time: undefined, totalVotingPower: 0 }; +} + +export const Misbehavior = { + encode(message: Misbehavior, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.type !== 0) { + writer.uint32(8).int32(message.type); + } + if (message.validator !== undefined) { + Validator.encode(message.validator, writer.uint32(18).fork()).ldelim(); + } + if (message.height !== 0) { + writer.uint32(24).int64(message.height); + } + if (message.time !== undefined) { + Timestamp.encode(toTimestamp(message.time), writer.uint32(34).fork()).ldelim(); + } + if (message.totalVotingPower !== 0) { + writer.uint32(40).int64(message.totalVotingPower); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Misbehavior { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMisbehavior(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.type = reader.int32() as any; + break; + case 2: + message.validator = Validator.decode(reader, reader.uint32()); + break; + case 3: + message.height = longToNumber(reader.int64() as Long); + break; + case 4: + message.time = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + case 5: + message.totalVotingPower = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Misbehavior { + return { + type: isSet(object.type) ? misbehaviorTypeFromJSON(object.type) : 0, + validator: isSet(object.validator) ? Validator.fromJSON(object.validator) : undefined, + height: isSet(object.height) ? Number(object.height) : 0, + time: isSet(object.time) ? fromJsonTimestamp(object.time) : undefined, + totalVotingPower: isSet(object.totalVotingPower) ? Number(object.totalVotingPower) : 0, + }; + }, + + toJSON(message: Misbehavior): unknown { + const obj: any = {}; + message.type !== undefined && (obj.type = misbehaviorTypeToJSON(message.type)); + message.validator !== undefined + && (obj.validator = message.validator ? Validator.toJSON(message.validator) : undefined); + message.height !== undefined && (obj.height = Math.round(message.height)); + message.time !== undefined && (obj.time = message.time.toISOString()); + message.totalVotingPower !== undefined && (obj.totalVotingPower = Math.round(message.totalVotingPower)); + return obj; + }, + + fromPartial, I>>(object: I): Misbehavior { + const message = createBaseMisbehavior(); + message.type = object.type ?? 0; + message.validator = (object.validator !== undefined && object.validator !== null) + ? Validator.fromPartial(object.validator) + : undefined; + message.height = object.height ?? 0; + message.time = object.time ?? 
undefined; + message.totalVotingPower = object.totalVotingPower ?? 0; + return message; + }, +}; + +function createBaseSnapshot(): Snapshot { + return { height: 0, format: 0, chunks: 0, hash: new Uint8Array(), metadata: new Uint8Array() }; +} + +export const Snapshot = { + encode(message: Snapshot, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.height !== 0) { + writer.uint32(8).uint64(message.height); + } + if (message.format !== 0) { + writer.uint32(16).uint32(message.format); + } + if (message.chunks !== 0) { + writer.uint32(24).uint32(message.chunks); + } + if (message.hash.length !== 0) { + writer.uint32(34).bytes(message.hash); + } + if (message.metadata.length !== 0) { + writer.uint32(42).bytes(message.metadata); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Snapshot { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSnapshot(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.height = longToNumber(reader.uint64() as Long); + break; + case 2: + message.format = reader.uint32(); + break; + case 3: + message.chunks = reader.uint32(); + break; + case 4: + message.hash = reader.bytes(); + break; + case 5: + message.metadata = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Snapshot { + return { + height: isSet(object.height) ? Number(object.height) : 0, + format: isSet(object.format) ? Number(object.format) : 0, + chunks: isSet(object.chunks) ? Number(object.chunks) : 0, + hash: isSet(object.hash) ? bytesFromBase64(object.hash) : new Uint8Array(), + metadata: isSet(object.metadata) ? 
bytesFromBase64(object.metadata) : new Uint8Array(), + }; + }, + + toJSON(message: Snapshot): unknown { + const obj: any = {}; + message.height !== undefined && (obj.height = Math.round(message.height)); + message.format !== undefined && (obj.format = Math.round(message.format)); + message.chunks !== undefined && (obj.chunks = Math.round(message.chunks)); + message.hash !== undefined + && (obj.hash = base64FromBytes(message.hash !== undefined ? message.hash : new Uint8Array())); + message.metadata !== undefined + && (obj.metadata = base64FromBytes(message.metadata !== undefined ? message.metadata : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): Snapshot { + const message = createBaseSnapshot(); + message.height = object.height ?? 0; + message.format = object.format ?? 0; + message.chunks = object.chunks ?? 0; + message.hash = object.hash ?? new Uint8Array(); + message.metadata = object.metadata ?? new Uint8Array(); + return message; + }, +}; + +export interface ABCIApplication { + Echo(request: RequestEcho): Promise; + Flush(request: RequestFlush): Promise; + Info(request: RequestInfo): Promise; + DeliverTx(request: RequestDeliverTx): Promise; + CheckTx(request: RequestCheckTx): Promise; + Query(request: RequestQuery): Promise; + Commit(request: RequestCommit): Promise; + InitChain(request: RequestInitChain): Promise; + BeginBlock(request: RequestBeginBlock): Promise; + EndBlock(request: RequestEndBlock): Promise; + ListSnapshots(request: RequestListSnapshots): Promise; + OfferSnapshot(request: RequestOfferSnapshot): Promise; + LoadSnapshotChunk(request: RequestLoadSnapshotChunk): Promise; + ApplySnapshotChunk(request: RequestApplySnapshotChunk): Promise; + PrepareProposal(request: RequestPrepareProposal): Promise; + ProcessProposal(request: RequestProcessProposal): Promise; +} + +export class ABCIApplicationClientImpl implements ABCIApplication { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.Echo = 
this.Echo.bind(this); + this.Flush = this.Flush.bind(this); + this.Info = this.Info.bind(this); + this.DeliverTx = this.DeliverTx.bind(this); + this.CheckTx = this.CheckTx.bind(this); + this.Query = this.Query.bind(this); + this.Commit = this.Commit.bind(this); + this.InitChain = this.InitChain.bind(this); + this.BeginBlock = this.BeginBlock.bind(this); + this.EndBlock = this.EndBlock.bind(this); + this.ListSnapshots = this.ListSnapshots.bind(this); + this.OfferSnapshot = this.OfferSnapshot.bind(this); + this.LoadSnapshotChunk = this.LoadSnapshotChunk.bind(this); + this.ApplySnapshotChunk = this.ApplySnapshotChunk.bind(this); + this.PrepareProposal = this.PrepareProposal.bind(this); + this.ProcessProposal = this.ProcessProposal.bind(this); + } + Echo(request: RequestEcho): Promise { + const data = RequestEcho.encode(request).finish(); + const promise = this.rpc.request("tendermint.abci.ABCIApplication", "Echo", data); + return promise.then((data) => ResponseEcho.decode(new _m0.Reader(data))); + } + + Flush(request: RequestFlush): Promise { + const data = RequestFlush.encode(request).finish(); + const promise = this.rpc.request("tendermint.abci.ABCIApplication", "Flush", data); + return promise.then((data) => ResponseFlush.decode(new _m0.Reader(data))); + } + + Info(request: RequestInfo): Promise { + const data = RequestInfo.encode(request).finish(); + const promise = this.rpc.request("tendermint.abci.ABCIApplication", "Info", data); + return promise.then((data) => ResponseInfo.decode(new _m0.Reader(data))); + } + + DeliverTx(request: RequestDeliverTx): Promise { + const data = RequestDeliverTx.encode(request).finish(); + const promise = this.rpc.request("tendermint.abci.ABCIApplication", "DeliverTx", data); + return promise.then((data) => ResponseDeliverTx.decode(new _m0.Reader(data))); + } + + CheckTx(request: RequestCheckTx): Promise { + const data = RequestCheckTx.encode(request).finish(); + const promise = this.rpc.request("tendermint.abci.ABCIApplication", 
"CheckTx", data); + return promise.then((data) => ResponseCheckTx.decode(new _m0.Reader(data))); + } + + Query(request: RequestQuery): Promise { + const data = RequestQuery.encode(request).finish(); + const promise = this.rpc.request("tendermint.abci.ABCIApplication", "Query", data); + return promise.then((data) => ResponseQuery.decode(new _m0.Reader(data))); + } + + Commit(request: RequestCommit): Promise { + const data = RequestCommit.encode(request).finish(); + const promise = this.rpc.request("tendermint.abci.ABCIApplication", "Commit", data); + return promise.then((data) => ResponseCommit.decode(new _m0.Reader(data))); + } + + InitChain(request: RequestInitChain): Promise { + const data = RequestInitChain.encode(request).finish(); + const promise = this.rpc.request("tendermint.abci.ABCIApplication", "InitChain", data); + return promise.then((data) => ResponseInitChain.decode(new _m0.Reader(data))); + } + + BeginBlock(request: RequestBeginBlock): Promise { + const data = RequestBeginBlock.encode(request).finish(); + const promise = this.rpc.request("tendermint.abci.ABCIApplication", "BeginBlock", data); + return promise.then((data) => ResponseBeginBlock.decode(new _m0.Reader(data))); + } + + EndBlock(request: RequestEndBlock): Promise { + const data = RequestEndBlock.encode(request).finish(); + const promise = this.rpc.request("tendermint.abci.ABCIApplication", "EndBlock", data); + return promise.then((data) => ResponseEndBlock.decode(new _m0.Reader(data))); + } + + ListSnapshots(request: RequestListSnapshots): Promise { + const data = RequestListSnapshots.encode(request).finish(); + const promise = this.rpc.request("tendermint.abci.ABCIApplication", "ListSnapshots", data); + return promise.then((data) => ResponseListSnapshots.decode(new _m0.Reader(data))); + } + + OfferSnapshot(request: RequestOfferSnapshot): Promise { + const data = RequestOfferSnapshot.encode(request).finish(); + const promise = this.rpc.request("tendermint.abci.ABCIApplication", 
"OfferSnapshot", data); + return promise.then((data) => ResponseOfferSnapshot.decode(new _m0.Reader(data))); + } + + LoadSnapshotChunk(request: RequestLoadSnapshotChunk): Promise { + const data = RequestLoadSnapshotChunk.encode(request).finish(); + const promise = this.rpc.request("tendermint.abci.ABCIApplication", "LoadSnapshotChunk", data); + return promise.then((data) => ResponseLoadSnapshotChunk.decode(new _m0.Reader(data))); + } + + ApplySnapshotChunk(request: RequestApplySnapshotChunk): Promise { + const data = RequestApplySnapshotChunk.encode(request).finish(); + const promise = this.rpc.request("tendermint.abci.ABCIApplication", "ApplySnapshotChunk", data); + return promise.then((data) => ResponseApplySnapshotChunk.decode(new _m0.Reader(data))); + } + + PrepareProposal(request: RequestPrepareProposal): Promise { + const data = RequestPrepareProposal.encode(request).finish(); + const promise = this.rpc.request("tendermint.abci.ABCIApplication", "PrepareProposal", data); + return promise.then((data) => ResponsePrepareProposal.decode(new _m0.Reader(data))); + } + + ProcessProposal(request: RequestProcessProposal): Promise { + const data = RequestProcessProposal.encode(request).finish(); + const promise = this.rpc.request("tendermint.abci.ABCIApplication", "ProcessProposal", data); + return promise.then((data) => ResponseProcessProposal.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if 
(globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function toTimestamp(date: Date): Timestamp { + const seconds = date.getTime() / 1_000; + const nanos = (date.getTime() % 1_000) * 1_000_000; + return { seconds, nanos }; +} + +function fromTimestamp(t: Timestamp): Date { + let millis = t.seconds * 1_000; + millis += t.nanos / 1_000_000; + return new Date(millis); +} + +function fromJsonTimestamp(o: any): Date { + if (o instanceof Date) { + return o; + } else if (typeof o === "string") { + return new Date(o); + } else { + return fromTimestamp(Timestamp.fromJSON(o)); + } +} + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.tx.v1beta1/types/tendermint/crypto/keys.ts 
b/ts-client/cosmos.tx.v1beta1/types/tendermint/crypto/keys.ts new file mode 100644 index 000000000..82d64fdf6 --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/tendermint/crypto/keys.ts @@ -0,0 +1,129 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "tendermint.crypto"; + +/** PublicKey defines the keys available for use with Validators */ +export interface PublicKey { + ed25519: Uint8Array | undefined; + secp256k1: Uint8Array | undefined; +} + +function createBasePublicKey(): PublicKey { + return { ed25519: undefined, secp256k1: undefined }; +} + +export const PublicKey = { + encode(message: PublicKey, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ed25519 !== undefined) { + writer.uint32(10).bytes(message.ed25519); + } + if (message.secp256k1 !== undefined) { + writer.uint32(18).bytes(message.secp256k1); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PublicKey { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePublicKey(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.ed25519 = reader.bytes(); + break; + case 2: + message.secp256k1 = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PublicKey { + return { + ed25519: isSet(object.ed25519) ? bytesFromBase64(object.ed25519) : undefined, + secp256k1: isSet(object.secp256k1) ? bytesFromBase64(object.secp256k1) : undefined, + }; + }, + + toJSON(message: PublicKey): unknown { + const obj: any = {}; + message.ed25519 !== undefined + && (obj.ed25519 = message.ed25519 !== undefined ? base64FromBytes(message.ed25519) : undefined); + message.secp256k1 !== undefined + && (obj.secp256k1 = message.secp256k1 !== undefined ? 
base64FromBytes(message.secp256k1) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): PublicKey { + const message = createBasePublicKey(); + message.ed25519 = object.ed25519 ?? undefined; + message.secp256k1 = object.secp256k1 ?? undefined; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.tx.v1beta1/types/tendermint/crypto/proof.ts b/ts-client/cosmos.tx.v1beta1/types/tendermint/crypto/proof.ts new file mode 100644 index 000000000..a307c776c --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/tendermint/crypto/proof.ts @@ -0,0 +1,439 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "tendermint.crypto"; + +export interface Proof { + total: number; + index: number; + leafHash: Uint8Array; + aunts: Uint8Array[]; +} + +export interface ValueOp { + /** Encoded in ProofOp.Key. */ + key: Uint8Array; + /** To encode in ProofOp.Data */ + proof: Proof | undefined; +} + +export interface DominoOp { + key: string; + input: string; + output: string; +} + +/** + * ProofOp defines an operation used for calculating Merkle root + * The data could be arbitrary format, providing nessecary data + * for example neighbouring node hash + */ +export interface ProofOp { + type: string; + key: Uint8Array; + data: Uint8Array; +} + +/** ProofOps is Merkle proof defined by the list of ProofOps */ +export interface ProofOps { + ops: ProofOp[]; +} + +function createBaseProof(): Proof { + return { total: 0, index: 0, leafHash: new Uint8Array(), aunts: [] }; +} + +export const Proof = { + encode(message: Proof, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.total !== 0) { + writer.uint32(8).int64(message.total); + } + if (message.index !== 0) { + writer.uint32(16).int64(message.index); + } + if (message.leafHash.length !== 0) { + writer.uint32(26).bytes(message.leafHash); + } + for (const v of message.aunts) { + writer.uint32(34).bytes(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Proof { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseProof(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.total = longToNumber(reader.int64() as Long); + break; + case 2: + message.index = longToNumber(reader.int64() as Long); + break; + case 3: + message.leafHash = reader.bytes(); + break; + case 4: + message.aunts.push(reader.bytes()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Proof { + return { + total: isSet(object.total) ? Number(object.total) : 0, + index: isSet(object.index) ? Number(object.index) : 0, + leafHash: isSet(object.leafHash) ? bytesFromBase64(object.leafHash) : new Uint8Array(), + aunts: Array.isArray(object?.aunts) ? object.aunts.map((e: any) => bytesFromBase64(e)) : [], + }; + }, + + toJSON(message: Proof): unknown { + const obj: any = {}; + message.total !== undefined && (obj.total = Math.round(message.total)); + message.index !== undefined && (obj.index = Math.round(message.index)); + message.leafHash !== undefined + && (obj.leafHash = base64FromBytes(message.leafHash !== undefined ? message.leafHash : new Uint8Array())); + if (message.aunts) { + obj.aunts = message.aunts.map((e) => base64FromBytes(e !== undefined ? e : new Uint8Array())); + } else { + obj.aunts = []; + } + return obj; + }, + + fromPartial, I>>(object: I): Proof { + const message = createBaseProof(); + message.total = object.total ?? 0; + message.index = object.index ?? 0; + message.leafHash = object.leafHash ?? 
new Uint8Array(); + message.aunts = object.aunts?.map((e) => e) || []; + return message; + }, +}; + +function createBaseValueOp(): ValueOp { + return { key: new Uint8Array(), proof: undefined }; +} + +export const ValueOp = { + encode(message: ValueOp, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + if (message.proof !== undefined) { + Proof.encode(message.proof, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValueOp { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValueOp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + case 2: + message.proof = Proof.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ValueOp { + return { + key: isSet(object.key) ? bytesFromBase64(object.key) : new Uint8Array(), + proof: isSet(object.proof) ? Proof.fromJSON(object.proof) : undefined, + }; + }, + + toJSON(message: ValueOp): unknown { + const obj: any = {}; + message.key !== undefined + && (obj.key = base64FromBytes(message.key !== undefined ? message.key : new Uint8Array())); + message.proof !== undefined && (obj.proof = message.proof ? Proof.toJSON(message.proof) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): ValueOp { + const message = createBaseValueOp(); + message.key = object.key ?? new Uint8Array(); + message.proof = (object.proof !== undefined && object.proof !== null) ? 
Proof.fromPartial(object.proof) : undefined; + return message; + }, +}; + +function createBaseDominoOp(): DominoOp { + return { key: "", input: "", output: "" }; +} + +export const DominoOp = { + encode(message: DominoOp, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.input !== "") { + writer.uint32(18).string(message.input); + } + if (message.output !== "") { + writer.uint32(26).string(message.output); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DominoOp { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDominoOp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.key = reader.string(); + break; + case 2: + message.input = reader.string(); + break; + case 3: + message.output = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DominoOp { + return { + key: isSet(object.key) ? String(object.key) : "", + input: isSet(object.input) ? String(object.input) : "", + output: isSet(object.output) ? String(object.output) : "", + }; + }, + + toJSON(message: DominoOp): unknown { + const obj: any = {}; + message.key !== undefined && (obj.key = message.key); + message.input !== undefined && (obj.input = message.input); + message.output !== undefined && (obj.output = message.output); + return obj; + }, + + fromPartial, I>>(object: I): DominoOp { + const message = createBaseDominoOp(); + message.key = object.key ?? ""; + message.input = object.input ?? ""; + message.output = object.output ?? 
""; + return message; + }, +}; + +function createBaseProofOp(): ProofOp { + return { type: "", key: new Uint8Array(), data: new Uint8Array() }; +} + +export const ProofOp = { + encode(message: ProofOp, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.type !== "") { + writer.uint32(10).string(message.type); + } + if (message.key.length !== 0) { + writer.uint32(18).bytes(message.key); + } + if (message.data.length !== 0) { + writer.uint32(26).bytes(message.data); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ProofOp { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseProofOp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.type = reader.string(); + break; + case 2: + message.key = reader.bytes(); + break; + case 3: + message.data = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ProofOp { + return { + type: isSet(object.type) ? String(object.type) : "", + key: isSet(object.key) ? bytesFromBase64(object.key) : new Uint8Array(), + data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(), + }; + }, + + toJSON(message: ProofOp): unknown { + const obj: any = {}; + message.type !== undefined && (obj.type = message.type); + message.key !== undefined + && (obj.key = base64FromBytes(message.key !== undefined ? message.key : new Uint8Array())); + message.data !== undefined + && (obj.data = base64FromBytes(message.data !== undefined ? message.data : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): ProofOp { + const message = createBaseProofOp(); + message.type = object.type ?? ""; + message.key = object.key ?? new Uint8Array(); + message.data = object.data ?? 
new Uint8Array(); + return message; + }, +}; + +function createBaseProofOps(): ProofOps { + return { ops: [] }; +} + +export const ProofOps = { + encode(message: ProofOps, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.ops) { + ProofOp.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ProofOps { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseProofOps(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.ops.push(ProofOp.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ProofOps { + return { ops: Array.isArray(object?.ops) ? object.ops.map((e: any) => ProofOp.fromJSON(e)) : [] }; + }, + + toJSON(message: ProofOps): unknown { + const obj: any = {}; + if (message.ops) { + obj.ops = message.ops.map((e) => e ? 
ProofOp.toJSON(e) : undefined); + } else { + obj.ops = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ProofOps { + const message = createBaseProofOps(); + message.ops = object.ops?.map((e) => ProofOp.fromPartial(e)) || []; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.tx.v1beta1/types/tendermint/types/block.ts b/ts-client/cosmos.tx.v1beta1/types/tendermint/types/block.ts new file mode 100644 index 000000000..329acae8b --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/tendermint/types/block.ts @@ -0,0 +1,112 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { EvidenceList } from "./evidence"; +import { Commit, Data, Header } from "./types"; + +export const protobufPackage = "tendermint.types"; + +export interface Block { + header: Header | undefined; + data: Data | undefined; + evidence: EvidenceList | undefined; + lastCommit: Commit | undefined; +} + +function createBaseBlock(): Block { + return { header: undefined, data: undefined, evidence: undefined, lastCommit: undefined }; +} + +export const Block = { + encode(message: Block, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.header !== undefined) { + Header.encode(message.header, writer.uint32(10).fork()).ldelim(); + } + if (message.data !== undefined) { + Data.encode(message.data, writer.uint32(18).fork()).ldelim(); + } + if (message.evidence !== undefined) { + EvidenceList.encode(message.evidence, writer.uint32(26).fork()).ldelim(); + } + if (message.lastCommit !== undefined) { + Commit.encode(message.lastCommit, writer.uint32(34).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Block { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseBlock(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.header = Header.decode(reader, reader.uint32()); + break; + case 2: + message.data = Data.decode(reader, reader.uint32()); + break; + case 3: + message.evidence = EvidenceList.decode(reader, reader.uint32()); + break; + case 4: + message.lastCommit = Commit.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Block { + return { + header: isSet(object.header) ? Header.fromJSON(object.header) : undefined, + data: isSet(object.data) ? Data.fromJSON(object.data) : undefined, + evidence: isSet(object.evidence) ? EvidenceList.fromJSON(object.evidence) : undefined, + lastCommit: isSet(object.lastCommit) ? Commit.fromJSON(object.lastCommit) : undefined, + }; + }, + + toJSON(message: Block): unknown { + const obj: any = {}; + message.header !== undefined && (obj.header = message.header ? Header.toJSON(message.header) : undefined); + message.data !== undefined && (obj.data = message.data ? Data.toJSON(message.data) : undefined); + message.evidence !== undefined + && (obj.evidence = message.evidence ? EvidenceList.toJSON(message.evidence) : undefined); + message.lastCommit !== undefined + && (obj.lastCommit = message.lastCommit ? Commit.toJSON(message.lastCommit) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): Block { + const message = createBaseBlock(); + message.header = (object.header !== undefined && object.header !== null) + ? Header.fromPartial(object.header) + : undefined; + message.data = (object.data !== undefined && object.data !== null) ? Data.fromPartial(object.data) : undefined; + message.evidence = (object.evidence !== undefined && object.evidence !== null) + ? 
EvidenceList.fromPartial(object.evidence) + : undefined; + message.lastCommit = (object.lastCommit !== undefined && object.lastCommit !== null) + ? Commit.fromPartial(object.lastCommit) + : undefined; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.tx.v1beta1/types/tendermint/types/evidence.ts b/ts-client/cosmos.tx.v1beta1/types/tendermint/types/evidence.ts new file mode 100644 index 000000000..98a4eb92e --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/tendermint/types/evidence.ts @@ -0,0 +1,412 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { Timestamp } from "../../google/protobuf/timestamp"; +import { LightBlock, Vote } from "./types"; +import { Validator } from "./validator"; + +export const protobufPackage = "tendermint.types"; + +export interface Evidence { + duplicateVoteEvidence: DuplicateVoteEvidence | undefined; + lightClientAttackEvidence: LightClientAttackEvidence | undefined; +} + +/** DuplicateVoteEvidence contains evidence of a validator signed two conflicting votes. */ +export interface DuplicateVoteEvidence { + voteA: Vote | undefined; + voteB: Vote | undefined; + totalVotingPower: number; + validatorPower: number; + timestamp: Date | undefined; +} + +/** LightClientAttackEvidence contains evidence of a set of validators attempting to mislead a light client. 
*/ +export interface LightClientAttackEvidence { + conflictingBlock: LightBlock | undefined; + commonHeight: number; + byzantineValidators: Validator[]; + totalVotingPower: number; + timestamp: Date | undefined; +} + +export interface EvidenceList { + evidence: Evidence[]; +} + +function createBaseEvidence(): Evidence { + return { duplicateVoteEvidence: undefined, lightClientAttackEvidence: undefined }; +} + +export const Evidence = { + encode(message: Evidence, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.duplicateVoteEvidence !== undefined) { + DuplicateVoteEvidence.encode(message.duplicateVoteEvidence, writer.uint32(10).fork()).ldelim(); + } + if (message.lightClientAttackEvidence !== undefined) { + LightClientAttackEvidence.encode(message.lightClientAttackEvidence, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Evidence { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEvidence(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.duplicateVoteEvidence = DuplicateVoteEvidence.decode(reader, reader.uint32()); + break; + case 2: + message.lightClientAttackEvidence = LightClientAttackEvidence.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Evidence { + return { + duplicateVoteEvidence: isSet(object.duplicateVoteEvidence) + ? DuplicateVoteEvidence.fromJSON(object.duplicateVoteEvidence) + : undefined, + lightClientAttackEvidence: isSet(object.lightClientAttackEvidence) + ? 
LightClientAttackEvidence.fromJSON(object.lightClientAttackEvidence) + : undefined, + }; + }, + + toJSON(message: Evidence): unknown { + const obj: any = {}; + message.duplicateVoteEvidence !== undefined && (obj.duplicateVoteEvidence = message.duplicateVoteEvidence + ? DuplicateVoteEvidence.toJSON(message.duplicateVoteEvidence) + : undefined); + message.lightClientAttackEvidence !== undefined + && (obj.lightClientAttackEvidence = message.lightClientAttackEvidence + ? LightClientAttackEvidence.toJSON(message.lightClientAttackEvidence) + : undefined); + return obj; + }, + + fromPartial, I>>(object: I): Evidence { + const message = createBaseEvidence(); + message.duplicateVoteEvidence = + (object.duplicateVoteEvidence !== undefined && object.duplicateVoteEvidence !== null) + ? DuplicateVoteEvidence.fromPartial(object.duplicateVoteEvidence) + : undefined; + message.lightClientAttackEvidence = + (object.lightClientAttackEvidence !== undefined && object.lightClientAttackEvidence !== null) + ? 
LightClientAttackEvidence.fromPartial(object.lightClientAttackEvidence) + : undefined; + return message; + }, +}; + +function createBaseDuplicateVoteEvidence(): DuplicateVoteEvidence { + return { voteA: undefined, voteB: undefined, totalVotingPower: 0, validatorPower: 0, timestamp: undefined }; +} + +export const DuplicateVoteEvidence = { + encode(message: DuplicateVoteEvidence, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.voteA !== undefined) { + Vote.encode(message.voteA, writer.uint32(10).fork()).ldelim(); + } + if (message.voteB !== undefined) { + Vote.encode(message.voteB, writer.uint32(18).fork()).ldelim(); + } + if (message.totalVotingPower !== 0) { + writer.uint32(24).int64(message.totalVotingPower); + } + if (message.validatorPower !== 0) { + writer.uint32(32).int64(message.validatorPower); + } + if (message.timestamp !== undefined) { + Timestamp.encode(toTimestamp(message.timestamp), writer.uint32(42).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DuplicateVoteEvidence { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDuplicateVoteEvidence(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.voteA = Vote.decode(reader, reader.uint32()); + break; + case 2: + message.voteB = Vote.decode(reader, reader.uint32()); + break; + case 3: + message.totalVotingPower = longToNumber(reader.int64() as Long); + break; + case 4: + message.validatorPower = longToNumber(reader.int64() as Long); + break; + case 5: + message.timestamp = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DuplicateVoteEvidence { + return { + voteA: isSet(object.voteA) ? 
Vote.fromJSON(object.voteA) : undefined, + voteB: isSet(object.voteB) ? Vote.fromJSON(object.voteB) : undefined, + totalVotingPower: isSet(object.totalVotingPower) ? Number(object.totalVotingPower) : 0, + validatorPower: isSet(object.validatorPower) ? Number(object.validatorPower) : 0, + timestamp: isSet(object.timestamp) ? fromJsonTimestamp(object.timestamp) : undefined, + }; + }, + + toJSON(message: DuplicateVoteEvidence): unknown { + const obj: any = {}; + message.voteA !== undefined && (obj.voteA = message.voteA ? Vote.toJSON(message.voteA) : undefined); + message.voteB !== undefined && (obj.voteB = message.voteB ? Vote.toJSON(message.voteB) : undefined); + message.totalVotingPower !== undefined && (obj.totalVotingPower = Math.round(message.totalVotingPower)); + message.validatorPower !== undefined && (obj.validatorPower = Math.round(message.validatorPower)); + message.timestamp !== undefined && (obj.timestamp = message.timestamp.toISOString()); + return obj; + }, + + fromPartial, I>>(object: I): DuplicateVoteEvidence { + const message = createBaseDuplicateVoteEvidence(); + message.voteA = (object.voteA !== undefined && object.voteA !== null) ? Vote.fromPartial(object.voteA) : undefined; + message.voteB = (object.voteB !== undefined && object.voteB !== null) ? Vote.fromPartial(object.voteB) : undefined; + message.totalVotingPower = object.totalVotingPower ?? 0; + message.validatorPower = object.validatorPower ?? 0; + message.timestamp = object.timestamp ?? 
undefined; + return message; + }, +}; + +function createBaseLightClientAttackEvidence(): LightClientAttackEvidence { + return { + conflictingBlock: undefined, + commonHeight: 0, + byzantineValidators: [], + totalVotingPower: 0, + timestamp: undefined, + }; +} + +export const LightClientAttackEvidence = { + encode(message: LightClientAttackEvidence, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.conflictingBlock !== undefined) { + LightBlock.encode(message.conflictingBlock, writer.uint32(10).fork()).ldelim(); + } + if (message.commonHeight !== 0) { + writer.uint32(16).int64(message.commonHeight); + } + for (const v of message.byzantineValidators) { + Validator.encode(v!, writer.uint32(26).fork()).ldelim(); + } + if (message.totalVotingPower !== 0) { + writer.uint32(32).int64(message.totalVotingPower); + } + if (message.timestamp !== undefined) { + Timestamp.encode(toTimestamp(message.timestamp), writer.uint32(42).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): LightClientAttackEvidence { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseLightClientAttackEvidence(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.conflictingBlock = LightBlock.decode(reader, reader.uint32()); + break; + case 2: + message.commonHeight = longToNumber(reader.int64() as Long); + break; + case 3: + message.byzantineValidators.push(Validator.decode(reader, reader.uint32())); + break; + case 4: + message.totalVotingPower = longToNumber(reader.int64() as Long); + break; + case 5: + message.timestamp = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): LightClientAttackEvidence { + return { + conflictingBlock: isSet(object.conflictingBlock) ? LightBlock.fromJSON(object.conflictingBlock) : undefined, + commonHeight: isSet(object.commonHeight) ? Number(object.commonHeight) : 0, + byzantineValidators: Array.isArray(object?.byzantineValidators) + ? object.byzantineValidators.map((e: any) => Validator.fromJSON(e)) + : [], + totalVotingPower: isSet(object.totalVotingPower) ? Number(object.totalVotingPower) : 0, + timestamp: isSet(object.timestamp) ? fromJsonTimestamp(object.timestamp) : undefined, + }; + }, + + toJSON(message: LightClientAttackEvidence): unknown { + const obj: any = {}; + message.conflictingBlock !== undefined + && (obj.conflictingBlock = message.conflictingBlock ? LightBlock.toJSON(message.conflictingBlock) : undefined); + message.commonHeight !== undefined && (obj.commonHeight = Math.round(message.commonHeight)); + if (message.byzantineValidators) { + obj.byzantineValidators = message.byzantineValidators.map((e) => e ? 
Validator.toJSON(e) : undefined); + } else { + obj.byzantineValidators = []; + } + message.totalVotingPower !== undefined && (obj.totalVotingPower = Math.round(message.totalVotingPower)); + message.timestamp !== undefined && (obj.timestamp = message.timestamp.toISOString()); + return obj; + }, + + fromPartial, I>>(object: I): LightClientAttackEvidence { + const message = createBaseLightClientAttackEvidence(); + message.conflictingBlock = (object.conflictingBlock !== undefined && object.conflictingBlock !== null) + ? LightBlock.fromPartial(object.conflictingBlock) + : undefined; + message.commonHeight = object.commonHeight ?? 0; + message.byzantineValidators = object.byzantineValidators?.map((e) => Validator.fromPartial(e)) || []; + message.totalVotingPower = object.totalVotingPower ?? 0; + message.timestamp = object.timestamp ?? undefined; + return message; + }, +}; + +function createBaseEvidenceList(): EvidenceList { + return { evidence: [] }; +} + +export const EvidenceList = { + encode(message: EvidenceList, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.evidence) { + Evidence.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EvidenceList { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEvidenceList(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.evidence.push(Evidence.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EvidenceList { + return { evidence: Array.isArray(object?.evidence) ? 
object.evidence.map((e: any) => Evidence.fromJSON(e)) : [] }; + }, + + toJSON(message: EvidenceList): unknown { + const obj: any = {}; + if (message.evidence) { + obj.evidence = message.evidence.map((e) => e ? Evidence.toJSON(e) : undefined); + } else { + obj.evidence = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EvidenceList { + const message = createBaseEvidenceList(); + message.evidence = object.evidence?.map((e) => Evidence.fromPartial(e)) || []; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function toTimestamp(date: Date): Timestamp { + const seconds = date.getTime() / 1_000; + const nanos = (date.getTime() % 1_000) * 1_000_000; + return { seconds, nanos }; +} + +function fromTimestamp(t: Timestamp): Date { + let millis = t.seconds * 1_000; + millis += t.nanos / 1_000_000; + return new Date(millis); +} + +function fromJsonTimestamp(o: any): Date { + if (o instanceof Date) { + return o; + } else if (typeof o === "string") { + return new Date(o); + } else { + return fromTimestamp(Timestamp.fromJSON(o)); + } +} + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.tx.v1beta1/types/tendermint/types/params.ts b/ts-client/cosmos.tx.v1beta1/types/tendermint/types/params.ts new file mode 100644 index 000000000..56ca7eaba --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/tendermint/types/params.ts @@ -0,0 +1,498 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { Duration } from "../../google/protobuf/duration"; + +export const protobufPackage = "tendermint.types"; + +/** + * ConsensusParams contains consensus critical parameters that determine the + * validity of blocks. + */ +export interface ConsensusParams { + block: BlockParams | undefined; + evidence: EvidenceParams | undefined; + validator: ValidatorParams | undefined; + version: VersionParams | undefined; +} + +/** BlockParams contains limits on the block size. */ +export interface BlockParams { + /** + * Max block size, in bytes. + * Note: must be greater than 0 + */ + maxBytes: number; + /** + * Max gas per block. 
+ * Note: must be greater or equal to -1 + */ + maxGas: number; +} + +/** EvidenceParams determine how we handle evidence of malfeasance. */ +export interface EvidenceParams { + /** + * Max age of evidence, in blocks. + * + * The basic formula for calculating this is: MaxAgeDuration / {average block + * time}. + */ + maxAgeNumBlocks: number; + /** + * Max age of evidence, in time. + * + * It should correspond with an app's "unbonding period" or other similar + * mechanism for handling [Nothing-At-Stake + * attacks](https://github.com/ethereum/wiki/wiki/Proof-of-Stake-FAQ#what-is-the-nothing-at-stake-problem-and-how-can-it-be-fixed). + */ + maxAgeDuration: + | Duration + | undefined; + /** + * This sets the maximum size of total evidence in bytes that can be committed in a single block. + * and should fall comfortably under the max block bytes. + * Default is 1048576 or 1MB + */ + maxBytes: number; +} + +/** + * ValidatorParams restrict the public key types validators can use. + * NOTE: uses ABCI pubkey naming, not Amino names. + */ +export interface ValidatorParams { + pubKeyTypes: string[]; +} + +/** VersionParams contains the ABCI application version. */ +export interface VersionParams { + app: number; +} + +/** + * HashedParams is a subset of ConsensusParams. + * + * It is hashed into the Header.ConsensusHash. 
+ */ +export interface HashedParams { + blockMaxBytes: number; + blockMaxGas: number; +} + +function createBaseConsensusParams(): ConsensusParams { + return { block: undefined, evidence: undefined, validator: undefined, version: undefined }; +} + +export const ConsensusParams = { + encode(message: ConsensusParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.block !== undefined) { + BlockParams.encode(message.block, writer.uint32(10).fork()).ldelim(); + } + if (message.evidence !== undefined) { + EvidenceParams.encode(message.evidence, writer.uint32(18).fork()).ldelim(); + } + if (message.validator !== undefined) { + ValidatorParams.encode(message.validator, writer.uint32(26).fork()).ldelim(); + } + if (message.version !== undefined) { + VersionParams.encode(message.version, writer.uint32(34).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ConsensusParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseConsensusParams(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.block = BlockParams.decode(reader, reader.uint32()); + break; + case 2: + message.evidence = EvidenceParams.decode(reader, reader.uint32()); + break; + case 3: + message.validator = ValidatorParams.decode(reader, reader.uint32()); + break; + case 4: + message.version = VersionParams.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ConsensusParams { + return { + block: isSet(object.block) ? BlockParams.fromJSON(object.block) : undefined, + evidence: isSet(object.evidence) ? EvidenceParams.fromJSON(object.evidence) : undefined, + validator: isSet(object.validator) ? 
ValidatorParams.fromJSON(object.validator) : undefined, + version: isSet(object.version) ? VersionParams.fromJSON(object.version) : undefined, + }; + }, + + toJSON(message: ConsensusParams): unknown { + const obj: any = {}; + message.block !== undefined && (obj.block = message.block ? BlockParams.toJSON(message.block) : undefined); + message.evidence !== undefined + && (obj.evidence = message.evidence ? EvidenceParams.toJSON(message.evidence) : undefined); + message.validator !== undefined + && (obj.validator = message.validator ? ValidatorParams.toJSON(message.validator) : undefined); + message.version !== undefined + && (obj.version = message.version ? VersionParams.toJSON(message.version) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): ConsensusParams { + const message = createBaseConsensusParams(); + message.block = (object.block !== undefined && object.block !== null) + ? BlockParams.fromPartial(object.block) + : undefined; + message.evidence = (object.evidence !== undefined && object.evidence !== null) + ? EvidenceParams.fromPartial(object.evidence) + : undefined; + message.validator = (object.validator !== undefined && object.validator !== null) + ? ValidatorParams.fromPartial(object.validator) + : undefined; + message.version = (object.version !== undefined && object.version !== null) + ? VersionParams.fromPartial(object.version) + : undefined; + return message; + }, +}; + +function createBaseBlockParams(): BlockParams { + return { maxBytes: 0, maxGas: 0 }; +} + +export const BlockParams = { + encode(message: BlockParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.maxBytes !== 0) { + writer.uint32(8).int64(message.maxBytes); + } + if (message.maxGas !== 0) { + writer.uint32(16).int64(message.maxGas); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BlockParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseBlockParams(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.maxBytes = longToNumber(reader.int64() as Long); + break; + case 2: + message.maxGas = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): BlockParams { + return { + maxBytes: isSet(object.maxBytes) ? Number(object.maxBytes) : 0, + maxGas: isSet(object.maxGas) ? Number(object.maxGas) : 0, + }; + }, + + toJSON(message: BlockParams): unknown { + const obj: any = {}; + message.maxBytes !== undefined && (obj.maxBytes = Math.round(message.maxBytes)); + message.maxGas !== undefined && (obj.maxGas = Math.round(message.maxGas)); + return obj; + }, + + fromPartial, I>>(object: I): BlockParams { + const message = createBaseBlockParams(); + message.maxBytes = object.maxBytes ?? 0; + message.maxGas = object.maxGas ?? 0; + return message; + }, +}; + +function createBaseEvidenceParams(): EvidenceParams { + return { maxAgeNumBlocks: 0, maxAgeDuration: undefined, maxBytes: 0 }; +} + +export const EvidenceParams = { + encode(message: EvidenceParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.maxAgeNumBlocks !== 0) { + writer.uint32(8).int64(message.maxAgeNumBlocks); + } + if (message.maxAgeDuration !== undefined) { + Duration.encode(message.maxAgeDuration, writer.uint32(18).fork()).ldelim(); + } + if (message.maxBytes !== 0) { + writer.uint32(24).int64(message.maxBytes); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EvidenceParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEvidenceParams(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.maxAgeNumBlocks = longToNumber(reader.int64() as Long); + break; + case 2: + message.maxAgeDuration = Duration.decode(reader, reader.uint32()); + break; + case 3: + message.maxBytes = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EvidenceParams { + return { + maxAgeNumBlocks: isSet(object.maxAgeNumBlocks) ? Number(object.maxAgeNumBlocks) : 0, + maxAgeDuration: isSet(object.maxAgeDuration) ? Duration.fromJSON(object.maxAgeDuration) : undefined, + maxBytes: isSet(object.maxBytes) ? Number(object.maxBytes) : 0, + }; + }, + + toJSON(message: EvidenceParams): unknown { + const obj: any = {}; + message.maxAgeNumBlocks !== undefined && (obj.maxAgeNumBlocks = Math.round(message.maxAgeNumBlocks)); + message.maxAgeDuration !== undefined + && (obj.maxAgeDuration = message.maxAgeDuration ? Duration.toJSON(message.maxAgeDuration) : undefined); + message.maxBytes !== undefined && (obj.maxBytes = Math.round(message.maxBytes)); + return obj; + }, + + fromPartial, I>>(object: I): EvidenceParams { + const message = createBaseEvidenceParams(); + message.maxAgeNumBlocks = object.maxAgeNumBlocks ?? 0; + message.maxAgeDuration = (object.maxAgeDuration !== undefined && object.maxAgeDuration !== null) + ? Duration.fromPartial(object.maxAgeDuration) + : undefined; + message.maxBytes = object.maxBytes ?? 
0; + return message; + }, +}; + +function createBaseValidatorParams(): ValidatorParams { + return { pubKeyTypes: [] }; +} + +export const ValidatorParams = { + encode(message: ValidatorParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.pubKeyTypes) { + writer.uint32(10).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidatorParams(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pubKeyTypes.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ValidatorParams { + return { pubKeyTypes: Array.isArray(object?.pubKeyTypes) ? object.pubKeyTypes.map((e: any) => String(e)) : [] }; + }, + + toJSON(message: ValidatorParams): unknown { + const obj: any = {}; + if (message.pubKeyTypes) { + obj.pubKeyTypes = message.pubKeyTypes.map((e) => e); + } else { + obj.pubKeyTypes = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ValidatorParams { + const message = createBaseValidatorParams(); + message.pubKeyTypes = object.pubKeyTypes?.map((e) => e) || []; + return message; + }, +}; + +function createBaseVersionParams(): VersionParams { + return { app: 0 }; +} + +export const VersionParams = { + encode(message: VersionParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.app !== 0) { + writer.uint32(8).uint64(message.app); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): VersionParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseVersionParams(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.app = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): VersionParams { + return { app: isSet(object.app) ? Number(object.app) : 0 }; + }, + + toJSON(message: VersionParams): unknown { + const obj: any = {}; + message.app !== undefined && (obj.app = Math.round(message.app)); + return obj; + }, + + fromPartial, I>>(object: I): VersionParams { + const message = createBaseVersionParams(); + message.app = object.app ?? 0; + return message; + }, +}; + +function createBaseHashedParams(): HashedParams { + return { blockMaxBytes: 0, blockMaxGas: 0 }; +} + +export const HashedParams = { + encode(message: HashedParams, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.blockMaxBytes !== 0) { + writer.uint32(8).int64(message.blockMaxBytes); + } + if (message.blockMaxGas !== 0) { + writer.uint32(16).int64(message.blockMaxGas); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): HashedParams { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHashedParams(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.blockMaxBytes = longToNumber(reader.int64() as Long); + break; + case 2: + message.blockMaxGas = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): HashedParams { + return { + blockMaxBytes: isSet(object.blockMaxBytes) ? Number(object.blockMaxBytes) : 0, + blockMaxGas: isSet(object.blockMaxGas) ? 
Number(object.blockMaxGas) : 0, + }; + }, + + toJSON(message: HashedParams): unknown { + const obj: any = {}; + message.blockMaxBytes !== undefined && (obj.blockMaxBytes = Math.round(message.blockMaxBytes)); + message.blockMaxGas !== undefined && (obj.blockMaxGas = Math.round(message.blockMaxGas)); + return obj; + }, + + fromPartial, I>>(object: I): HashedParams { + const message = createBaseHashedParams(); + message.blockMaxBytes = object.blockMaxBytes ?? 0; + message.blockMaxGas = object.blockMaxGas ?? 0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.tx.v1beta1/types/tendermint/types/types.ts b/ts-client/cosmos.tx.v1beta1/types/tendermint/types/types.ts new file mode 100644 index 000000000..93cdfab9b --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/tendermint/types/types.ts @@ -0,0 +1,1452 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { Timestamp } from "../../google/protobuf/timestamp"; +import { Proof } from "../crypto/proof"; +import { Consensus } from "../version/types"; +import { ValidatorSet } from "./validator"; + +export const protobufPackage = "tendermint.types"; + +/** BlockIdFlag indicates which BlcokID the signature is for */ +export enum BlockIDFlag { + BLOCK_ID_FLAG_UNKNOWN = 0, + BLOCK_ID_FLAG_ABSENT = 1, + BLOCK_ID_FLAG_COMMIT = 2, + BLOCK_ID_FLAG_NIL = 3, + UNRECOGNIZED = -1, +} + +export function blockIDFlagFromJSON(object: any): BlockIDFlag { + switch (object) { + case 0: + case "BLOCK_ID_FLAG_UNKNOWN": + return BlockIDFlag.BLOCK_ID_FLAG_UNKNOWN; + case 1: + case "BLOCK_ID_FLAG_ABSENT": + return BlockIDFlag.BLOCK_ID_FLAG_ABSENT; + case 2: + case "BLOCK_ID_FLAG_COMMIT": + return BlockIDFlag.BLOCK_ID_FLAG_COMMIT; + case 3: + case "BLOCK_ID_FLAG_NIL": + return BlockIDFlag.BLOCK_ID_FLAG_NIL; + case -1: + case "UNRECOGNIZED": + default: + return BlockIDFlag.UNRECOGNIZED; + } +} + +export function blockIDFlagToJSON(object: BlockIDFlag): string { + switch (object) { + case BlockIDFlag.BLOCK_ID_FLAG_UNKNOWN: + return "BLOCK_ID_FLAG_UNKNOWN"; + case 
BlockIDFlag.BLOCK_ID_FLAG_ABSENT: + return "BLOCK_ID_FLAG_ABSENT"; + case BlockIDFlag.BLOCK_ID_FLAG_COMMIT: + return "BLOCK_ID_FLAG_COMMIT"; + case BlockIDFlag.BLOCK_ID_FLAG_NIL: + return "BLOCK_ID_FLAG_NIL"; + case BlockIDFlag.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** SignedMsgType is a type of signed message in the consensus. */ +export enum SignedMsgType { + SIGNED_MSG_TYPE_UNKNOWN = 0, + /** SIGNED_MSG_TYPE_PREVOTE - Votes */ + SIGNED_MSG_TYPE_PREVOTE = 1, + SIGNED_MSG_TYPE_PRECOMMIT = 2, + /** SIGNED_MSG_TYPE_PROPOSAL - Proposals */ + SIGNED_MSG_TYPE_PROPOSAL = 32, + UNRECOGNIZED = -1, +} + +export function signedMsgTypeFromJSON(object: any): SignedMsgType { + switch (object) { + case 0: + case "SIGNED_MSG_TYPE_UNKNOWN": + return SignedMsgType.SIGNED_MSG_TYPE_UNKNOWN; + case 1: + case "SIGNED_MSG_TYPE_PREVOTE": + return SignedMsgType.SIGNED_MSG_TYPE_PREVOTE; + case 2: + case "SIGNED_MSG_TYPE_PRECOMMIT": + return SignedMsgType.SIGNED_MSG_TYPE_PRECOMMIT; + case 32: + case "SIGNED_MSG_TYPE_PROPOSAL": + return SignedMsgType.SIGNED_MSG_TYPE_PROPOSAL; + case -1: + case "UNRECOGNIZED": + default: + return SignedMsgType.UNRECOGNIZED; + } +} + +export function signedMsgTypeToJSON(object: SignedMsgType): string { + switch (object) { + case SignedMsgType.SIGNED_MSG_TYPE_UNKNOWN: + return "SIGNED_MSG_TYPE_UNKNOWN"; + case SignedMsgType.SIGNED_MSG_TYPE_PREVOTE: + return "SIGNED_MSG_TYPE_PREVOTE"; + case SignedMsgType.SIGNED_MSG_TYPE_PRECOMMIT: + return "SIGNED_MSG_TYPE_PRECOMMIT"; + case SignedMsgType.SIGNED_MSG_TYPE_PROPOSAL: + return "SIGNED_MSG_TYPE_PROPOSAL"; + case SignedMsgType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** PartsetHeader */ +export interface PartSetHeader { + total: number; + hash: Uint8Array; +} + +export interface Part { + index: number; + bytes: Uint8Array; + proof: Proof | undefined; +} + +/** BlockID */ +export interface BlockID { + hash: Uint8Array; + partSetHeader: PartSetHeader | undefined; +} + +/** 
Header defines the structure of a block header. */ +export interface Header { + /** basic block info */ + version: Consensus | undefined; + chainId: string; + height: number; + time: + | Date + | undefined; + /** prev block info */ + lastBlockId: + | BlockID + | undefined; + /** hashes of block data */ + lastCommitHash: Uint8Array; + /** transactions */ + dataHash: Uint8Array; + /** hashes from the app output from the prev block */ + validatorsHash: Uint8Array; + /** validators for the next block */ + nextValidatorsHash: Uint8Array; + /** consensus params for current block */ + consensusHash: Uint8Array; + /** state after txs from the previous block */ + appHash: Uint8Array; + /** root hash of all results from the txs from the previous block */ + lastResultsHash: Uint8Array; + /** consensus info */ + evidenceHash: Uint8Array; + /** original proposer of the block */ + proposerAddress: Uint8Array; +} + +/** Data contains the set of transactions included in the block */ +export interface Data { + /** + * Txs that will be applied by state @ block.Height+1. + * NOTE: not all txs here are valid. We're just agreeing on the order first. + * This means that block.AppHash does not include these txs. + */ + txs: Uint8Array[]; +} + +/** + * Vote represents a prevote, precommit, or commit vote from validators for + * consensus. + */ +export interface Vote { + type: SignedMsgType; + height: number; + round: number; + /** zero if vote is nil. */ + blockId: BlockID | undefined; + timestamp: Date | undefined; + validatorAddress: Uint8Array; + validatorIndex: number; + signature: Uint8Array; +} + +/** Commit contains the evidence that a block was committed by a set of validators. */ +export interface Commit { + height: number; + round: number; + blockId: BlockID | undefined; + signatures: CommitSig[]; +} + +/** CommitSig is a part of the Vote included in a Commit. 
*/ +export interface CommitSig { + blockIdFlag: BlockIDFlag; + validatorAddress: Uint8Array; + timestamp: Date | undefined; + signature: Uint8Array; +} + +export interface Proposal { + type: SignedMsgType; + height: number; + round: number; + polRound: number; + blockId: BlockID | undefined; + timestamp: Date | undefined; + signature: Uint8Array; +} + +export interface SignedHeader { + header: Header | undefined; + commit: Commit | undefined; +} + +export interface LightBlock { + signedHeader: SignedHeader | undefined; + validatorSet: ValidatorSet | undefined; +} + +export interface BlockMeta { + blockId: BlockID | undefined; + blockSize: number; + header: Header | undefined; + numTxs: number; +} + +/** TxProof represents a Merkle proof of the presence of a transaction in the Merkle tree. */ +export interface TxProof { + rootHash: Uint8Array; + data: Uint8Array; + proof: Proof | undefined; +} + +function createBasePartSetHeader(): PartSetHeader { + return { total: 0, hash: new Uint8Array() }; +} + +export const PartSetHeader = { + encode(message: PartSetHeader, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.total !== 0) { + writer.uint32(8).uint32(message.total); + } + if (message.hash.length !== 0) { + writer.uint32(18).bytes(message.hash); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PartSetHeader { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePartSetHeader(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.total = reader.uint32(); + break; + case 2: + message.hash = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PartSetHeader { + return { + total: isSet(object.total) ? Number(object.total) : 0, + hash: isSet(object.hash) ? 
bytesFromBase64(object.hash) : new Uint8Array(), + }; + }, + + toJSON(message: PartSetHeader): unknown { + const obj: any = {}; + message.total !== undefined && (obj.total = Math.round(message.total)); + message.hash !== undefined + && (obj.hash = base64FromBytes(message.hash !== undefined ? message.hash : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): PartSetHeader { + const message = createBasePartSetHeader(); + message.total = object.total ?? 0; + message.hash = object.hash ?? new Uint8Array(); + return message; + }, +}; + +function createBasePart(): Part { + return { index: 0, bytes: new Uint8Array(), proof: undefined }; +} + +export const Part = { + encode(message: Part, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.index !== 0) { + writer.uint32(8).uint32(message.index); + } + if (message.bytes.length !== 0) { + writer.uint32(18).bytes(message.bytes); + } + if (message.proof !== undefined) { + Proof.encode(message.proof, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Part { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.index = reader.uint32(); + break; + case 2: + message.bytes = reader.bytes(); + break; + case 3: + message.proof = Proof.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Part { + return { + index: isSet(object.index) ? Number(object.index) : 0, + bytes: isSet(object.bytes) ? bytesFromBase64(object.bytes) : new Uint8Array(), + proof: isSet(object.proof) ? 
Proof.fromJSON(object.proof) : undefined, + }; + }, + + toJSON(message: Part): unknown { + const obj: any = {}; + message.index !== undefined && (obj.index = Math.round(message.index)); + message.bytes !== undefined + && (obj.bytes = base64FromBytes(message.bytes !== undefined ? message.bytes : new Uint8Array())); + message.proof !== undefined && (obj.proof = message.proof ? Proof.toJSON(message.proof) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): Part { + const message = createBasePart(); + message.index = object.index ?? 0; + message.bytes = object.bytes ?? new Uint8Array(); + message.proof = (object.proof !== undefined && object.proof !== null) ? Proof.fromPartial(object.proof) : undefined; + return message; + }, +}; + +function createBaseBlockID(): BlockID { + return { hash: new Uint8Array(), partSetHeader: undefined }; +} + +export const BlockID = { + encode(message: BlockID, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.hash.length !== 0) { + writer.uint32(10).bytes(message.hash); + } + if (message.partSetHeader !== undefined) { + PartSetHeader.encode(message.partSetHeader, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BlockID { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBlockID(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.hash = reader.bytes(); + break; + case 2: + message.partSetHeader = PartSetHeader.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): BlockID { + return { + hash: isSet(object.hash) ? bytesFromBase64(object.hash) : new Uint8Array(), + partSetHeader: isSet(object.partSetHeader) ? 
PartSetHeader.fromJSON(object.partSetHeader) : undefined, + }; + }, + + toJSON(message: BlockID): unknown { + const obj: any = {}; + message.hash !== undefined + && (obj.hash = base64FromBytes(message.hash !== undefined ? message.hash : new Uint8Array())); + message.partSetHeader !== undefined + && (obj.partSetHeader = message.partSetHeader ? PartSetHeader.toJSON(message.partSetHeader) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): BlockID { + const message = createBaseBlockID(); + message.hash = object.hash ?? new Uint8Array(); + message.partSetHeader = (object.partSetHeader !== undefined && object.partSetHeader !== null) + ? PartSetHeader.fromPartial(object.partSetHeader) + : undefined; + return message; + }, +}; + +function createBaseHeader(): Header { + return { + version: undefined, + chainId: "", + height: 0, + time: undefined, + lastBlockId: undefined, + lastCommitHash: new Uint8Array(), + dataHash: new Uint8Array(), + validatorsHash: new Uint8Array(), + nextValidatorsHash: new Uint8Array(), + consensusHash: new Uint8Array(), + appHash: new Uint8Array(), + lastResultsHash: new Uint8Array(), + evidenceHash: new Uint8Array(), + proposerAddress: new Uint8Array(), + }; +} + +export const Header = { + encode(message: Header, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.version !== undefined) { + Consensus.encode(message.version, writer.uint32(10).fork()).ldelim(); + } + if (message.chainId !== "") { + writer.uint32(18).string(message.chainId); + } + if (message.height !== 0) { + writer.uint32(24).int64(message.height); + } + if (message.time !== undefined) { + Timestamp.encode(toTimestamp(message.time), writer.uint32(34).fork()).ldelim(); + } + if (message.lastBlockId !== undefined) { + BlockID.encode(message.lastBlockId, writer.uint32(42).fork()).ldelim(); + } + if (message.lastCommitHash.length !== 0) { + writer.uint32(50).bytes(message.lastCommitHash); + } + if (message.dataHash.length !== 0) { + 
writer.uint32(58).bytes(message.dataHash); + } + if (message.validatorsHash.length !== 0) { + writer.uint32(66).bytes(message.validatorsHash); + } + if (message.nextValidatorsHash.length !== 0) { + writer.uint32(74).bytes(message.nextValidatorsHash); + } + if (message.consensusHash.length !== 0) { + writer.uint32(82).bytes(message.consensusHash); + } + if (message.appHash.length !== 0) { + writer.uint32(90).bytes(message.appHash); + } + if (message.lastResultsHash.length !== 0) { + writer.uint32(98).bytes(message.lastResultsHash); + } + if (message.evidenceHash.length !== 0) { + writer.uint32(106).bytes(message.evidenceHash); + } + if (message.proposerAddress.length !== 0) { + writer.uint32(114).bytes(message.proposerAddress); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Header { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHeader(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.version = Consensus.decode(reader, reader.uint32()); + break; + case 2: + message.chainId = reader.string(); + break; + case 3: + message.height = longToNumber(reader.int64() as Long); + break; + case 4: + message.time = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + case 5: + message.lastBlockId = BlockID.decode(reader, reader.uint32()); + break; + case 6: + message.lastCommitHash = reader.bytes(); + break; + case 7: + message.dataHash = reader.bytes(); + break; + case 8: + message.validatorsHash = reader.bytes(); + break; + case 9: + message.nextValidatorsHash = reader.bytes(); + break; + case 10: + message.consensusHash = reader.bytes(); + break; + case 11: + message.appHash = reader.bytes(); + break; + case 12: + message.lastResultsHash = reader.bytes(); + break; + case 13: + message.evidenceHash = reader.bytes(); + break; + case 14: + 
message.proposerAddress = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Header { + return { + version: isSet(object.version) ? Consensus.fromJSON(object.version) : undefined, + chainId: isSet(object.chainId) ? String(object.chainId) : "", + height: isSet(object.height) ? Number(object.height) : 0, + time: isSet(object.time) ? fromJsonTimestamp(object.time) : undefined, + lastBlockId: isSet(object.lastBlockId) ? BlockID.fromJSON(object.lastBlockId) : undefined, + lastCommitHash: isSet(object.lastCommitHash) ? bytesFromBase64(object.lastCommitHash) : new Uint8Array(), + dataHash: isSet(object.dataHash) ? bytesFromBase64(object.dataHash) : new Uint8Array(), + validatorsHash: isSet(object.validatorsHash) ? bytesFromBase64(object.validatorsHash) : new Uint8Array(), + nextValidatorsHash: isSet(object.nextValidatorsHash) + ? bytesFromBase64(object.nextValidatorsHash) + : new Uint8Array(), + consensusHash: isSet(object.consensusHash) ? bytesFromBase64(object.consensusHash) : new Uint8Array(), + appHash: isSet(object.appHash) ? bytesFromBase64(object.appHash) : new Uint8Array(), + lastResultsHash: isSet(object.lastResultsHash) ? bytesFromBase64(object.lastResultsHash) : new Uint8Array(), + evidenceHash: isSet(object.evidenceHash) ? bytesFromBase64(object.evidenceHash) : new Uint8Array(), + proposerAddress: isSet(object.proposerAddress) ? bytesFromBase64(object.proposerAddress) : new Uint8Array(), + }; + }, + + toJSON(message: Header): unknown { + const obj: any = {}; + message.version !== undefined && (obj.version = message.version ? Consensus.toJSON(message.version) : undefined); + message.chainId !== undefined && (obj.chainId = message.chainId); + message.height !== undefined && (obj.height = Math.round(message.height)); + message.time !== undefined && (obj.time = message.time.toISOString()); + message.lastBlockId !== undefined + && (obj.lastBlockId = message.lastBlockId ? 
BlockID.toJSON(message.lastBlockId) : undefined); + message.lastCommitHash !== undefined + && (obj.lastCommitHash = base64FromBytes( + message.lastCommitHash !== undefined ? message.lastCommitHash : new Uint8Array(), + )); + message.dataHash !== undefined + && (obj.dataHash = base64FromBytes(message.dataHash !== undefined ? message.dataHash : new Uint8Array())); + message.validatorsHash !== undefined + && (obj.validatorsHash = base64FromBytes( + message.validatorsHash !== undefined ? message.validatorsHash : new Uint8Array(), + )); + message.nextValidatorsHash !== undefined + && (obj.nextValidatorsHash = base64FromBytes( + message.nextValidatorsHash !== undefined ? message.nextValidatorsHash : new Uint8Array(), + )); + message.consensusHash !== undefined + && (obj.consensusHash = base64FromBytes( + message.consensusHash !== undefined ? message.consensusHash : new Uint8Array(), + )); + message.appHash !== undefined + && (obj.appHash = base64FromBytes(message.appHash !== undefined ? message.appHash : new Uint8Array())); + message.lastResultsHash !== undefined + && (obj.lastResultsHash = base64FromBytes( + message.lastResultsHash !== undefined ? message.lastResultsHash : new Uint8Array(), + )); + message.evidenceHash !== undefined + && (obj.evidenceHash = base64FromBytes( + message.evidenceHash !== undefined ? message.evidenceHash : new Uint8Array(), + )); + message.proposerAddress !== undefined + && (obj.proposerAddress = base64FromBytes( + message.proposerAddress !== undefined ? message.proposerAddress : new Uint8Array(), + )); + return obj; + }, + + fromPartial, I>>(object: I): Header { + const message = createBaseHeader(); + message.version = (object.version !== undefined && object.version !== null) + ? Consensus.fromPartial(object.version) + : undefined; + message.chainId = object.chainId ?? ""; + message.height = object.height ?? 0; + message.time = object.time ?? 
undefined; + message.lastBlockId = (object.lastBlockId !== undefined && object.lastBlockId !== null) + ? BlockID.fromPartial(object.lastBlockId) + : undefined; + message.lastCommitHash = object.lastCommitHash ?? new Uint8Array(); + message.dataHash = object.dataHash ?? new Uint8Array(); + message.validatorsHash = object.validatorsHash ?? new Uint8Array(); + message.nextValidatorsHash = object.nextValidatorsHash ?? new Uint8Array(); + message.consensusHash = object.consensusHash ?? new Uint8Array(); + message.appHash = object.appHash ?? new Uint8Array(); + message.lastResultsHash = object.lastResultsHash ?? new Uint8Array(); + message.evidenceHash = object.evidenceHash ?? new Uint8Array(); + message.proposerAddress = object.proposerAddress ?? new Uint8Array(); + return message; + }, +}; + +function createBaseData(): Data { + return { txs: [] }; +} + +export const Data = { + encode(message: Data, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.txs) { + writer.uint32(10).bytes(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Data { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseData(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.txs.push(reader.bytes()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Data { + return { txs: Array.isArray(object?.txs) ? object.txs.map((e: any) => bytesFromBase64(e)) : [] }; + }, + + toJSON(message: Data): unknown { + const obj: any = {}; + if (message.txs) { + obj.txs = message.txs.map((e) => base64FromBytes(e !== undefined ? 
e : new Uint8Array())); + } else { + obj.txs = []; + } + return obj; + }, + + fromPartial, I>>(object: I): Data { + const message = createBaseData(); + message.txs = object.txs?.map((e) => e) || []; + return message; + }, +}; + +function createBaseVote(): Vote { + return { + type: 0, + height: 0, + round: 0, + blockId: undefined, + timestamp: undefined, + validatorAddress: new Uint8Array(), + validatorIndex: 0, + signature: new Uint8Array(), + }; +} + +export const Vote = { + encode(message: Vote, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.type !== 0) { + writer.uint32(8).int32(message.type); + } + if (message.height !== 0) { + writer.uint32(16).int64(message.height); + } + if (message.round !== 0) { + writer.uint32(24).int32(message.round); + } + if (message.blockId !== undefined) { + BlockID.encode(message.blockId, writer.uint32(34).fork()).ldelim(); + } + if (message.timestamp !== undefined) { + Timestamp.encode(toTimestamp(message.timestamp), writer.uint32(42).fork()).ldelim(); + } + if (message.validatorAddress.length !== 0) { + writer.uint32(50).bytes(message.validatorAddress); + } + if (message.validatorIndex !== 0) { + writer.uint32(56).int32(message.validatorIndex); + } + if (message.signature.length !== 0) { + writer.uint32(66).bytes(message.signature); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Vote { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseVote(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.type = reader.int32() as any; + break; + case 2: + message.height = longToNumber(reader.int64() as Long); + break; + case 3: + message.round = reader.int32(); + break; + case 4: + message.blockId = BlockID.decode(reader, reader.uint32()); + break; + case 5: + message.timestamp = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + case 6: + message.validatorAddress = reader.bytes(); + break; + case 7: + message.validatorIndex = reader.int32(); + break; + case 8: + message.signature = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Vote { + return { + type: isSet(object.type) ? signedMsgTypeFromJSON(object.type) : 0, + height: isSet(object.height) ? Number(object.height) : 0, + round: isSet(object.round) ? Number(object.round) : 0, + blockId: isSet(object.blockId) ? BlockID.fromJSON(object.blockId) : undefined, + timestamp: isSet(object.timestamp) ? fromJsonTimestamp(object.timestamp) : undefined, + validatorAddress: isSet(object.validatorAddress) ? bytesFromBase64(object.validatorAddress) : new Uint8Array(), + validatorIndex: isSet(object.validatorIndex) ? Number(object.validatorIndex) : 0, + signature: isSet(object.signature) ? bytesFromBase64(object.signature) : new Uint8Array(), + }; + }, + + toJSON(message: Vote): unknown { + const obj: any = {}; + message.type !== undefined && (obj.type = signedMsgTypeToJSON(message.type)); + message.height !== undefined && (obj.height = Math.round(message.height)); + message.round !== undefined && (obj.round = Math.round(message.round)); + message.blockId !== undefined && (obj.blockId = message.blockId ? 
BlockID.toJSON(message.blockId) : undefined); + message.timestamp !== undefined && (obj.timestamp = message.timestamp.toISOString()); + message.validatorAddress !== undefined + && (obj.validatorAddress = base64FromBytes( + message.validatorAddress !== undefined ? message.validatorAddress : new Uint8Array(), + )); + message.validatorIndex !== undefined && (obj.validatorIndex = Math.round(message.validatorIndex)); + message.signature !== undefined + && (obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): Vote { + const message = createBaseVote(); + message.type = object.type ?? 0; + message.height = object.height ?? 0; + message.round = object.round ?? 0; + message.blockId = (object.blockId !== undefined && object.blockId !== null) + ? BlockID.fromPartial(object.blockId) + : undefined; + message.timestamp = object.timestamp ?? undefined; + message.validatorAddress = object.validatorAddress ?? new Uint8Array(); + message.validatorIndex = object.validatorIndex ?? 0; + message.signature = object.signature ?? new Uint8Array(); + return message; + }, +}; + +function createBaseCommit(): Commit { + return { height: 0, round: 0, blockId: undefined, signatures: [] }; +} + +export const Commit = { + encode(message: Commit, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.height !== 0) { + writer.uint32(8).int64(message.height); + } + if (message.round !== 0) { + writer.uint32(16).int32(message.round); + } + if (message.blockId !== undefined) { + BlockID.encode(message.blockId, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.signatures) { + CommitSig.encode(v!, writer.uint32(34).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Commit { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseCommit(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.height = longToNumber(reader.int64() as Long); + break; + case 2: + message.round = reader.int32(); + break; + case 3: + message.blockId = BlockID.decode(reader, reader.uint32()); + break; + case 4: + message.signatures.push(CommitSig.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Commit { + return { + height: isSet(object.height) ? Number(object.height) : 0, + round: isSet(object.round) ? Number(object.round) : 0, + blockId: isSet(object.blockId) ? BlockID.fromJSON(object.blockId) : undefined, + signatures: Array.isArray(object?.signatures) ? object.signatures.map((e: any) => CommitSig.fromJSON(e)) : [], + }; + }, + + toJSON(message: Commit): unknown { + const obj: any = {}; + message.height !== undefined && (obj.height = Math.round(message.height)); + message.round !== undefined && (obj.round = Math.round(message.round)); + message.blockId !== undefined && (obj.blockId = message.blockId ? BlockID.toJSON(message.blockId) : undefined); + if (message.signatures) { + obj.signatures = message.signatures.map((e) => e ? CommitSig.toJSON(e) : undefined); + } else { + obj.signatures = []; + } + return obj; + }, + + fromPartial, I>>(object: I): Commit { + const message = createBaseCommit(); + message.height = object.height ?? 0; + message.round = object.round ?? 0; + message.blockId = (object.blockId !== undefined && object.blockId !== null) + ? 
BlockID.fromPartial(object.blockId) + : undefined; + message.signatures = object.signatures?.map((e) => CommitSig.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseCommitSig(): CommitSig { + return { blockIdFlag: 0, validatorAddress: new Uint8Array(), timestamp: undefined, signature: new Uint8Array() }; +} + +export const CommitSig = { + encode(message: CommitSig, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.blockIdFlag !== 0) { + writer.uint32(8).int32(message.blockIdFlag); + } + if (message.validatorAddress.length !== 0) { + writer.uint32(18).bytes(message.validatorAddress); + } + if (message.timestamp !== undefined) { + Timestamp.encode(toTimestamp(message.timestamp), writer.uint32(26).fork()).ldelim(); + } + if (message.signature.length !== 0) { + writer.uint32(34).bytes(message.signature); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CommitSig { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCommitSig(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.blockIdFlag = reader.int32() as any; + break; + case 2: + message.validatorAddress = reader.bytes(); + break; + case 3: + message.timestamp = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + case 4: + message.signature = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): CommitSig { + return { + blockIdFlag: isSet(object.blockIdFlag) ? blockIDFlagFromJSON(object.blockIdFlag) : 0, + validatorAddress: isSet(object.validatorAddress) ? bytesFromBase64(object.validatorAddress) : new Uint8Array(), + timestamp: isSet(object.timestamp) ? fromJsonTimestamp(object.timestamp) : undefined, + signature: isSet(object.signature) ? 
bytesFromBase64(object.signature) : new Uint8Array(), + }; + }, + + toJSON(message: CommitSig): unknown { + const obj: any = {}; + message.blockIdFlag !== undefined && (obj.blockIdFlag = blockIDFlagToJSON(message.blockIdFlag)); + message.validatorAddress !== undefined + && (obj.validatorAddress = base64FromBytes( + message.validatorAddress !== undefined ? message.validatorAddress : new Uint8Array(), + )); + message.timestamp !== undefined && (obj.timestamp = message.timestamp.toISOString()); + message.signature !== undefined + && (obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): CommitSig { + const message = createBaseCommitSig(); + message.blockIdFlag = object.blockIdFlag ?? 0; + message.validatorAddress = object.validatorAddress ?? new Uint8Array(); + message.timestamp = object.timestamp ?? undefined; + message.signature = object.signature ?? new Uint8Array(); + return message; + }, +}; + +function createBaseProposal(): Proposal { + return { + type: 0, + height: 0, + round: 0, + polRound: 0, + blockId: undefined, + timestamp: undefined, + signature: new Uint8Array(), + }; +} + +export const Proposal = { + encode(message: Proposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.type !== 0) { + writer.uint32(8).int32(message.type); + } + if (message.height !== 0) { + writer.uint32(16).int64(message.height); + } + if (message.round !== 0) { + writer.uint32(24).int32(message.round); + } + if (message.polRound !== 0) { + writer.uint32(32).int32(message.polRound); + } + if (message.blockId !== undefined) { + BlockID.encode(message.blockId, writer.uint32(42).fork()).ldelim(); + } + if (message.timestamp !== undefined) { + Timestamp.encode(toTimestamp(message.timestamp), writer.uint32(50).fork()).ldelim(); + } + if (message.signature.length !== 0) { + writer.uint32(58).bytes(message.signature); + } + return writer; + }, + + decode(input: 
_m0.Reader | Uint8Array, length?: number): Proposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseProposal(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.type = reader.int32() as any; + break; + case 2: + message.height = longToNumber(reader.int64() as Long); + break; + case 3: + message.round = reader.int32(); + break; + case 4: + message.polRound = reader.int32(); + break; + case 5: + message.blockId = BlockID.decode(reader, reader.uint32()); + break; + case 6: + message.timestamp = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + case 7: + message.signature = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Proposal { + return { + type: isSet(object.type) ? signedMsgTypeFromJSON(object.type) : 0, + height: isSet(object.height) ? Number(object.height) : 0, + round: isSet(object.round) ? Number(object.round) : 0, + polRound: isSet(object.polRound) ? Number(object.polRound) : 0, + blockId: isSet(object.blockId) ? BlockID.fromJSON(object.blockId) : undefined, + timestamp: isSet(object.timestamp) ? fromJsonTimestamp(object.timestamp) : undefined, + signature: isSet(object.signature) ? bytesFromBase64(object.signature) : new Uint8Array(), + }; + }, + + toJSON(message: Proposal): unknown { + const obj: any = {}; + message.type !== undefined && (obj.type = signedMsgTypeToJSON(message.type)); + message.height !== undefined && (obj.height = Math.round(message.height)); + message.round !== undefined && (obj.round = Math.round(message.round)); + message.polRound !== undefined && (obj.polRound = Math.round(message.polRound)); + message.blockId !== undefined && (obj.blockId = message.blockId ? 
BlockID.toJSON(message.blockId) : undefined); + message.timestamp !== undefined && (obj.timestamp = message.timestamp.toISOString()); + message.signature !== undefined + && (obj.signature = base64FromBytes(message.signature !== undefined ? message.signature : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): Proposal { + const message = createBaseProposal(); + message.type = object.type ?? 0; + message.height = object.height ?? 0; + message.round = object.round ?? 0; + message.polRound = object.polRound ?? 0; + message.blockId = (object.blockId !== undefined && object.blockId !== null) + ? BlockID.fromPartial(object.blockId) + : undefined; + message.timestamp = object.timestamp ?? undefined; + message.signature = object.signature ?? new Uint8Array(); + return message; + }, +}; + +function createBaseSignedHeader(): SignedHeader { + return { header: undefined, commit: undefined }; +} + +export const SignedHeader = { + encode(message: SignedHeader, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.header !== undefined) { + Header.encode(message.header, writer.uint32(10).fork()).ldelim(); + } + if (message.commit !== undefined) { + Commit.encode(message.commit, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SignedHeader { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSignedHeader(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.header = Header.decode(reader, reader.uint32()); + break; + case 2: + message.commit = Commit.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SignedHeader { + return { + header: isSet(object.header) ? 
Header.fromJSON(object.header) : undefined, + commit: isSet(object.commit) ? Commit.fromJSON(object.commit) : undefined, + }; + }, + + toJSON(message: SignedHeader): unknown { + const obj: any = {}; + message.header !== undefined && (obj.header = message.header ? Header.toJSON(message.header) : undefined); + message.commit !== undefined && (obj.commit = message.commit ? Commit.toJSON(message.commit) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): SignedHeader { + const message = createBaseSignedHeader(); + message.header = (object.header !== undefined && object.header !== null) + ? Header.fromPartial(object.header) + : undefined; + message.commit = (object.commit !== undefined && object.commit !== null) + ? Commit.fromPartial(object.commit) + : undefined; + return message; + }, +}; + +function createBaseLightBlock(): LightBlock { + return { signedHeader: undefined, validatorSet: undefined }; +} + +export const LightBlock = { + encode(message: LightBlock, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signedHeader !== undefined) { + SignedHeader.encode(message.signedHeader, writer.uint32(10).fork()).ldelim(); + } + if (message.validatorSet !== undefined) { + ValidatorSet.encode(message.validatorSet, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): LightBlock { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseLightBlock(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signedHeader = SignedHeader.decode(reader, reader.uint32()); + break; + case 2: + message.validatorSet = ValidatorSet.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): LightBlock { + return { + signedHeader: isSet(object.signedHeader) ? SignedHeader.fromJSON(object.signedHeader) : undefined, + validatorSet: isSet(object.validatorSet) ? ValidatorSet.fromJSON(object.validatorSet) : undefined, + }; + }, + + toJSON(message: LightBlock): unknown { + const obj: any = {}; + message.signedHeader !== undefined + && (obj.signedHeader = message.signedHeader ? SignedHeader.toJSON(message.signedHeader) : undefined); + message.validatorSet !== undefined + && (obj.validatorSet = message.validatorSet ? ValidatorSet.toJSON(message.validatorSet) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): LightBlock { + const message = createBaseLightBlock(); + message.signedHeader = (object.signedHeader !== undefined && object.signedHeader !== null) + ? SignedHeader.fromPartial(object.signedHeader) + : undefined; + message.validatorSet = (object.validatorSet !== undefined && object.validatorSet !== null) + ? 
ValidatorSet.fromPartial(object.validatorSet) + : undefined; + return message; + }, +}; + +function createBaseBlockMeta(): BlockMeta { + return { blockId: undefined, blockSize: 0, header: undefined, numTxs: 0 }; +} + +export const BlockMeta = { + encode(message: BlockMeta, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.blockId !== undefined) { + BlockID.encode(message.blockId, writer.uint32(10).fork()).ldelim(); + } + if (message.blockSize !== 0) { + writer.uint32(16).int64(message.blockSize); + } + if (message.header !== undefined) { + Header.encode(message.header, writer.uint32(26).fork()).ldelim(); + } + if (message.numTxs !== 0) { + writer.uint32(32).int64(message.numTxs); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BlockMeta { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBlockMeta(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.blockId = BlockID.decode(reader, reader.uint32()); + break; + case 2: + message.blockSize = longToNumber(reader.int64() as Long); + break; + case 3: + message.header = Header.decode(reader, reader.uint32()); + break; + case 4: + message.numTxs = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): BlockMeta { + return { + blockId: isSet(object.blockId) ? BlockID.fromJSON(object.blockId) : undefined, + blockSize: isSet(object.blockSize) ? Number(object.blockSize) : 0, + header: isSet(object.header) ? Header.fromJSON(object.header) : undefined, + numTxs: isSet(object.numTxs) ? Number(object.numTxs) : 0, + }; + }, + + toJSON(message: BlockMeta): unknown { + const obj: any = {}; + message.blockId !== undefined && (obj.blockId = message.blockId ? 
BlockID.toJSON(message.blockId) : undefined); + message.blockSize !== undefined && (obj.blockSize = Math.round(message.blockSize)); + message.header !== undefined && (obj.header = message.header ? Header.toJSON(message.header) : undefined); + message.numTxs !== undefined && (obj.numTxs = Math.round(message.numTxs)); + return obj; + }, + + fromPartial, I>>(object: I): BlockMeta { + const message = createBaseBlockMeta(); + message.blockId = (object.blockId !== undefined && object.blockId !== null) + ? BlockID.fromPartial(object.blockId) + : undefined; + message.blockSize = object.blockSize ?? 0; + message.header = (object.header !== undefined && object.header !== null) + ? Header.fromPartial(object.header) + : undefined; + message.numTxs = object.numTxs ?? 0; + return message; + }, +}; + +function createBaseTxProof(): TxProof { + return { rootHash: new Uint8Array(), data: new Uint8Array(), proof: undefined }; +} + +export const TxProof = { + encode(message: TxProof, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.rootHash.length !== 0) { + writer.uint32(10).bytes(message.rootHash); + } + if (message.data.length !== 0) { + writer.uint32(18).bytes(message.data); + } + if (message.proof !== undefined) { + Proof.encode(message.proof, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): TxProof { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseTxProof(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rootHash = reader.bytes(); + break; + case 2: + message.data = reader.bytes(); + break; + case 3: + message.proof = Proof.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): TxProof { + return { + rootHash: isSet(object.rootHash) ? bytesFromBase64(object.rootHash) : new Uint8Array(), + data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(), + proof: isSet(object.proof) ? Proof.fromJSON(object.proof) : undefined, + }; + }, + + toJSON(message: TxProof): unknown { + const obj: any = {}; + message.rootHash !== undefined + && (obj.rootHash = base64FromBytes(message.rootHash !== undefined ? message.rootHash : new Uint8Array())); + message.data !== undefined + && (obj.data = base64FromBytes(message.data !== undefined ? message.data : new Uint8Array())); + message.proof !== undefined && (obj.proof = message.proof ? Proof.toJSON(message.proof) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): TxProof { + const message = createBaseTxProof(); + message.rootHash = object.rootHash ?? new Uint8Array(); + message.data = object.data ?? new Uint8Array(); + message.proof = (object.proof !== undefined && object.proof !== null) ? 
Proof.fromPartial(object.proof) : undefined; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function toTimestamp(date: Date): Timestamp { + const seconds = date.getTime() / 1_000; + const nanos = (date.getTime() % 1_000) * 1_000_000; + return { seconds, nanos }; +} + +function fromTimestamp(t: Timestamp): Date { + let millis = t.seconds * 1_000; + millis += t.nanos / 1_000_000; + return new Date(millis); +} + +function fromJsonTimestamp(o: any): Date { + if (o instanceof Date) { + return o; + } else if (typeof o === "string") { + return new Date(o); + } else { + return fromTimestamp(Timestamp.fromJSON(o)); + } +} + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.tx.v1beta1/types/tendermint/types/validator.ts b/ts-client/cosmos.tx.v1beta1/types/tendermint/types/validator.ts new file mode 100644 index 000000000..690845836 --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/tendermint/types/validator.ts @@ -0,0 +1,308 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { PublicKey } from "../crypto/keys"; + +export const protobufPackage = "tendermint.types"; + +export interface ValidatorSet { + validators: Validator[]; + proposer: Validator | undefined; + totalVotingPower: number; +} + +export interface Validator { + address: Uint8Array; + pubKey: PublicKey | undefined; + votingPower: number; + proposerPriority: number; +} + +export interface SimpleValidator { + pubKey: PublicKey | undefined; + votingPower: number; +} + +function createBaseValidatorSet(): ValidatorSet { + return { validators: [], proposer: undefined, totalVotingPower: 0 }; +} + +export const ValidatorSet = { + encode(message: 
ValidatorSet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.validators) { + Validator.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.proposer !== undefined) { + Validator.encode(message.proposer, writer.uint32(18).fork()).ldelim(); + } + if (message.totalVotingPower !== 0) { + writer.uint32(24).int64(message.totalVotingPower); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ValidatorSet { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidatorSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.validators.push(Validator.decode(reader, reader.uint32())); + break; + case 2: + message.proposer = Validator.decode(reader, reader.uint32()); + break; + case 3: + message.totalVotingPower = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ValidatorSet { + return { + validators: Array.isArray(object?.validators) ? object.validators.map((e: any) => Validator.fromJSON(e)) : [], + proposer: isSet(object.proposer) ? Validator.fromJSON(object.proposer) : undefined, + totalVotingPower: isSet(object.totalVotingPower) ? Number(object.totalVotingPower) : 0, + }; + }, + + toJSON(message: ValidatorSet): unknown { + const obj: any = {}; + if (message.validators) { + obj.validators = message.validators.map((e) => e ? Validator.toJSON(e) : undefined); + } else { + obj.validators = []; + } + message.proposer !== undefined + && (obj.proposer = message.proposer ? 
Validator.toJSON(message.proposer) : undefined); + message.totalVotingPower !== undefined && (obj.totalVotingPower = Math.round(message.totalVotingPower)); + return obj; + }, + + fromPartial, I>>(object: I): ValidatorSet { + const message = createBaseValidatorSet(); + message.validators = object.validators?.map((e) => Validator.fromPartial(e)) || []; + message.proposer = (object.proposer !== undefined && object.proposer !== null) + ? Validator.fromPartial(object.proposer) + : undefined; + message.totalVotingPower = object.totalVotingPower ?? 0; + return message; + }, +}; + +function createBaseValidator(): Validator { + return { address: new Uint8Array(), pubKey: undefined, votingPower: 0, proposerPriority: 0 }; +} + +export const Validator = { + encode(message: Validator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address.length !== 0) { + writer.uint32(10).bytes(message.address); + } + if (message.pubKey !== undefined) { + PublicKey.encode(message.pubKey, writer.uint32(18).fork()).ldelim(); + } + if (message.votingPower !== 0) { + writer.uint32(24).int64(message.votingPower); + } + if (message.proposerPriority !== 0) { + writer.uint32(32).int64(message.proposerPriority); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Validator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseValidator(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.bytes(); + break; + case 2: + message.pubKey = PublicKey.decode(reader, reader.uint32()); + break; + case 3: + message.votingPower = longToNumber(reader.int64() as Long); + break; + case 4: + message.proposerPriority = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Validator { + return { + address: isSet(object.address) ? bytesFromBase64(object.address) : new Uint8Array(), + pubKey: isSet(object.pubKey) ? PublicKey.fromJSON(object.pubKey) : undefined, + votingPower: isSet(object.votingPower) ? Number(object.votingPower) : 0, + proposerPriority: isSet(object.proposerPriority) ? Number(object.proposerPriority) : 0, + }; + }, + + toJSON(message: Validator): unknown { + const obj: any = {}; + message.address !== undefined + && (obj.address = base64FromBytes(message.address !== undefined ? message.address : new Uint8Array())); + message.pubKey !== undefined && (obj.pubKey = message.pubKey ? PublicKey.toJSON(message.pubKey) : undefined); + message.votingPower !== undefined && (obj.votingPower = Math.round(message.votingPower)); + message.proposerPriority !== undefined && (obj.proposerPriority = Math.round(message.proposerPriority)); + return obj; + }, + + fromPartial, I>>(object: I): Validator { + const message = createBaseValidator(); + message.address = object.address ?? new Uint8Array(); + message.pubKey = (object.pubKey !== undefined && object.pubKey !== null) + ? PublicKey.fromPartial(object.pubKey) + : undefined; + message.votingPower = object.votingPower ?? 0; + message.proposerPriority = object.proposerPriority ?? 
0; + return message; + }, +}; + +function createBaseSimpleValidator(): SimpleValidator { + return { pubKey: undefined, votingPower: 0 }; +} + +export const SimpleValidator = { + encode(message: SimpleValidator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pubKey !== undefined) { + PublicKey.encode(message.pubKey, writer.uint32(10).fork()).ldelim(); + } + if (message.votingPower !== 0) { + writer.uint32(16).int64(message.votingPower); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SimpleValidator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSimpleValidator(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pubKey = PublicKey.decode(reader, reader.uint32()); + break; + case 2: + message.votingPower = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SimpleValidator { + return { + pubKey: isSet(object.pubKey) ? PublicKey.fromJSON(object.pubKey) : undefined, + votingPower: isSet(object.votingPower) ? Number(object.votingPower) : 0, + }; + }, + + toJSON(message: SimpleValidator): unknown { + const obj: any = {}; + message.pubKey !== undefined && (obj.pubKey = message.pubKey ? PublicKey.toJSON(message.pubKey) : undefined); + message.votingPower !== undefined && (obj.votingPower = Math.round(message.votingPower)); + return obj; + }, + + fromPartial, I>>(object: I): SimpleValidator { + const message = createBaseSimpleValidator(); + message.pubKey = (object.pubKey !== undefined && object.pubKey !== null) + ? PublicKey.fromPartial(object.pubKey) + : undefined; + message.votingPower = object.votingPower ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.tx.v1beta1/types/tendermint/version/types.ts b/ts-client/cosmos.tx.v1beta1/types/tendermint/version/types.ts new file mode 100644 index 000000000..f326bec12 --- /dev/null +++ b/ts-client/cosmos.tx.v1beta1/types/tendermint/version/types.ts @@ -0,0 +1,184 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "tendermint.version"; + +/** + * App includes the protocol and software version for the application. + * This information is included in ResponseInfo. The App.Protocol can be + * updated in ResponseEndBlock. + */ +export interface App { + protocol: number; + software: string; +} + +/** + * Consensus captures the consensus rules for processing a block in the blockchain, + * including all blockchain data structures and the rules of the application's + * state transition machine. + */ +export interface Consensus { + block: number; + app: number; +} + +function createBaseApp(): App { + return { protocol: 0, software: "" }; +} + +export const App = { + encode(message: App, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.protocol !== 0) { + writer.uint32(8).uint64(message.protocol); + } + if (message.software !== "") { + writer.uint32(18).string(message.software); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): App { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseApp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.protocol = longToNumber(reader.uint64() as Long); + break; + case 2: + message.software = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): App { + return { + protocol: isSet(object.protocol) ? Number(object.protocol) : 0, + software: isSet(object.software) ? String(object.software) : "", + }; + }, + + toJSON(message: App): unknown { + const obj: any = {}; + message.protocol !== undefined && (obj.protocol = Math.round(message.protocol)); + message.software !== undefined && (obj.software = message.software); + return obj; + }, + + fromPartial, I>>(object: I): App { + const message = createBaseApp(); + message.protocol = object.protocol ?? 0; + message.software = object.software ?? ""; + return message; + }, +}; + +function createBaseConsensus(): Consensus { + return { block: 0, app: 0 }; +} + +export const Consensus = { + encode(message: Consensus, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.block !== 0) { + writer.uint32(8).uint64(message.block); + } + if (message.app !== 0) { + writer.uint32(16).uint64(message.app); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Consensus { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseConsensus(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.block = longToNumber(reader.uint64() as Long); + break; + case 2: + message.app = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Consensus { + return { block: isSet(object.block) ? 
Number(object.block) : 0, app: isSet(object.app) ? Number(object.app) : 0 }; + }, + + toJSON(message: Consensus): unknown { + const obj: any = {}; + message.block !== undefined && (obj.block = Math.round(message.block)); + message.app !== undefined && (obj.app = Math.round(message.app)); + return obj; + }, + + fromPartial, I>>(object: I): Consensus { + const message = createBaseConsensus(); + message.block = object.block ?? 0; + message.app = object.app ?? 0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.upgrade.v1beta1/index.ts b/ts-client/cosmos.upgrade.v1beta1/index.ts new file mode 100755 index 000000000..c9dfa159e --- /dev/null +++ b/ts-client/cosmos.upgrade.v1beta1/index.ts @@ -0,0 +1,6 @@ +import Module from './module'; +import { txClient, queryClient, registry } from './module'; +import { msgTypes } from './registry'; + +export * from "./types"; +export { Module, msgTypes, txClient, queryClient, registry }; diff --git a/ts-client/cosmos.upgrade.v1beta1/module.ts b/ts-client/cosmos.upgrade.v1beta1/module.ts new file mode 100755 index 000000000..22466f801 --- /dev/null +++ b/ts-client/cosmos.upgrade.v1beta1/module.ts @@ -0,0 +1,170 @@ +// Generated by Ignite ignite.com/cli + +import { StdFee } from "@cosmjs/launchpad"; +import { SigningStargateClient, DeliverTxResponse } from "@cosmjs/stargate"; +import { EncodeObject, GeneratedType, OfflineSigner, Registry } from "@cosmjs/proto-signing"; +import { msgTypes } from './registry'; +import { IgniteClient } from "../client" +import { MissingWalletError } from "../helpers" +import { Api } from "./rest"; +import { MsgSoftwareUpgrade } from "./types/cosmos/upgrade/v1beta1/tx"; +import { MsgCancelUpgrade } from "./types/cosmos/upgrade/v1beta1/tx"; + +import { Plan as typePlan} from "./types" +import { SoftwareUpgradeProposal as typeSoftwareUpgradeProposal} from "./types" +import { CancelSoftwareUpgradeProposal as typeCancelSoftwareUpgradeProposal} from "./types" +import { ModuleVersion as typeModuleVersion} from "./types" + +export { MsgSoftwareUpgrade, 
MsgCancelUpgrade }; + +type sendMsgSoftwareUpgradeParams = { + value: MsgSoftwareUpgrade, + fee?: StdFee, + memo?: string +}; + +type sendMsgCancelUpgradeParams = { + value: MsgCancelUpgrade, + fee?: StdFee, + memo?: string +}; + + +type msgSoftwareUpgradeParams = { + value: MsgSoftwareUpgrade, +}; + +type msgCancelUpgradeParams = { + value: MsgCancelUpgrade, +}; + + +export const registry = new Registry(msgTypes); + +type Field = { + name: string; + type: unknown; +} +function getStructure(template) { + const structure: {fields: Field[]} = { fields: [] } + for (let [key, value] of Object.entries(template)) { + let field = { name: key, type: typeof value } + structure.fields.push(field) + } + return structure +} +const defaultFee = { + amount: [], + gas: "200000", +}; + +interface TxClientOptions { + addr: string + prefix: string + signer?: OfflineSigner +} + +export const txClient = ({ signer, prefix, addr }: TxClientOptions = { addr: "http://localhost:26657", prefix: "cosmos" }) => { + + return { + + async sendMsgSoftwareUpgrade({ value, fee, memo }: sendMsgSoftwareUpgradeParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgSoftwareUpgrade: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgSoftwareUpgrade({ value: MsgSoftwareUpgrade.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgSoftwareUpgrade: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgCancelUpgrade({ value, fee, memo }: sendMsgCancelUpgradeParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgCancelUpgrade: Unable to sign Tx. 
Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgCancelUpgrade({ value: MsgCancelUpgrade.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgCancelUpgrade: Could not broadcast Tx: '+ e.message) + } + }, + + + msgSoftwareUpgrade({ value }: msgSoftwareUpgradeParams): EncodeObject { + try { + return { typeUrl: "/cosmos.upgrade.v1beta1.MsgSoftwareUpgrade", value: MsgSoftwareUpgrade.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgSoftwareUpgrade: Could not create message: ' + e.message) + } + }, + + msgCancelUpgrade({ value }: msgCancelUpgradeParams): EncodeObject { + try { + return { typeUrl: "/cosmos.upgrade.v1beta1.MsgCancelUpgrade", value: MsgCancelUpgrade.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgCancelUpgrade: Could not create message: ' + e.message) + } + }, + + } +}; + +interface QueryClientOptions { + addr: string +} + +export const queryClient = ({ addr: addr }: QueryClientOptions = { addr: "http://localhost:1317" }) => { + return new Api({ baseURL: addr }); +}; + +class SDKModule { + public query: ReturnType; + public tx: ReturnType; + public structure: Record; + public registry: Array<[string, GeneratedType]> = []; + + constructor(client: IgniteClient) { + + this.query = queryClient({ addr: client.env.apiURL }); + this.updateTX(client); + this.structure = { + Plan: getStructure(typePlan.fromPartial({})), + SoftwareUpgradeProposal: getStructure(typeSoftwareUpgradeProposal.fromPartial({})), + CancelSoftwareUpgradeProposal: getStructure(typeCancelSoftwareUpgradeProposal.fromPartial({})), + ModuleVersion: getStructure(typeModuleVersion.fromPartial({})), + + }; + client.on('signer-changed',(signer) => { + 
this.updateTX(client); + }) + } + updateTX(client: IgniteClient) { + const methods = txClient({ + signer: client.signer, + addr: client.env.rpcURL, + prefix: client.env.prefix ?? "cosmos", + }) + + this.tx = methods; + for (let m in methods) { + this.tx[m] = methods[m].bind(this.tx); + } + } +}; + +const Module = (test: IgniteClient) => { + return { + module: { + CosmosUpgradeV1Beta1: new SDKModule(test) + }, + registry: msgTypes + } +} +export default Module; \ No newline at end of file diff --git a/ts-client/cosmos.upgrade.v1beta1/registry.ts b/ts-client/cosmos.upgrade.v1beta1/registry.ts new file mode 100755 index 000000000..5e341994d --- /dev/null +++ b/ts-client/cosmos.upgrade.v1beta1/registry.ts @@ -0,0 +1,11 @@ +import { GeneratedType } from "@cosmjs/proto-signing"; +import { MsgSoftwareUpgrade } from "./types/cosmos/upgrade/v1beta1/tx"; +import { MsgCancelUpgrade } from "./types/cosmos/upgrade/v1beta1/tx"; + +const msgTypes: Array<[string, GeneratedType]> = [ + ["/cosmos.upgrade.v1beta1.MsgSoftwareUpgrade", MsgSoftwareUpgrade], + ["/cosmos.upgrade.v1beta1.MsgCancelUpgrade", MsgCancelUpgrade], + +]; + +export { msgTypes } \ No newline at end of file diff --git a/ts-client/cosmos.upgrade.v1beta1/rest.ts b/ts-client/cosmos.upgrade.v1beta1/rest.ts new file mode 100644 index 000000000..ad32f017d --- /dev/null +++ b/ts-client/cosmos.upgrade.v1beta1/rest.ts @@ -0,0 +1,467 @@ +/* eslint-disable */ +/* tslint:disable */ +/* + * --------------------------------------------------------------- + * ## THIS FILE WAS GENERATED VIA SWAGGER-TYPESCRIPT-API ## + * ## ## + * ## AUTHOR: acacode ## + * ## SOURCE: https://github.com/acacode/swagger-typescript-api ## + * --------------------------------------------------------------- + */ + +/** +* `Any` contains an arbitrary serialized protocol buffer message along with a +URL that describes the type of the serialized message. 
+ +Protobuf library provides support to pack/unpack Any values in the form +of utility functions or additional generated methods of the Any type. + +Example 1: Pack and unpack a message in C++. + + Foo foo = ...; + Any any; + any.PackFrom(foo); + ... + if (any.UnpackTo(&foo)) { + ... + } + +Example 2: Pack and unpack a message in Java. + + Foo foo = ...; + Any any = Any.pack(foo); + ... + if (any.is(Foo.class)) { + foo = any.unpack(Foo.class); + } + + Example 3: Pack and unpack a message in Python. + + foo = Foo(...) + any = Any() + any.Pack(foo) + ... + if any.Is(Foo.DESCRIPTOR): + any.Unpack(foo) + ... + + Example 4: Pack and unpack a message in Go + + foo := &pb.Foo{...} + any, err := anypb.New(foo) + if err != nil { + ... + } + ... + foo := &pb.Foo{} + if err := any.UnmarshalTo(foo); err != nil { + ... + } + +The pack methods provided by protobuf library will by default use +'type.googleapis.com/full.type.name' as the type URL and the unpack +methods only use the fully qualified type name after the last '/' +in the type URL, for example "foo.bar.com/x/y.z" will yield type +name "y.z". + + +JSON +==== +The JSON representation of an `Any` value uses the regular +representation of the deserialized, embedded message, with an +additional field `@type` which contains the type URL. Example: + + package google.profile; + message Person { + string first_name = 1; + string last_name = 2; + } + + { + "@type": "type.googleapis.com/google.profile.Person", + "firstName": , + "lastName": + } + +If the embedded message type is well-known and has a custom JSON +representation, that representation will be embedded adding a field +`value` which holds the custom JSON in addition to the `@type` +field. 
Example (for message [google.protobuf.Duration][]): + + { + "@type": "type.googleapis.com/google.protobuf.Duration", + "value": "1.212s" + } +*/ +export interface ProtobufAny { + /** + * A URL/resource name that uniquely identifies the type of the serialized + * protocol buffer message. This string must contain at least + * one "/" character. The last segment of the URL's path must represent + * the fully qualified name of the type (as in + * `path/google.protobuf.Duration`). The name should be in a canonical form + * (e.g., leading "." is not accepted). + * + * In practice, teams usually precompile into the binary all types that they + * expect it to use in the context of Any. However, for URLs which use the + * scheme `http`, `https`, or no scheme, one can optionally set up a type + * server that maps type URLs to message definitions as follows: + * * If no scheme is provided, `https` is assumed. + * * An HTTP GET on the URL must yield a [google.protobuf.Type][] + * value in binary format, or produce an error. + * * Applications are allowed to cache lookup results based on the + * URL, or have them precompiled into a binary to avoid any + * lookup. Therefore, binary compatibility needs to be preserved + * on changes to types. (Use versioned type names to manage + * breaking changes.) + * Note: this functionality is not currently available in the official + * protobuf release, and it is not used for type URLs beginning with + * type.googleapis.com. + * Schemes other than `http`, `https` (or the empty scheme) might be + * used with implementation specific semantics. + */ + "@type"?: string; +} + +export interface RpcStatus { + /** @format int32 */ + code?: number; + message?: string; + details?: ProtobufAny[]; +} + +/** +* ModuleVersion specifies a module and its consensus version. 
+ +Since: cosmos-sdk 0.43 +*/ +export interface V1Beta1ModuleVersion { + /** name of the app module */ + name?: string; + + /** + * consensus version of the app module + * @format uint64 + */ + version?: string; +} + +/** +* MsgCancelUpgradeResponse is the Msg/CancelUpgrade response type. + +Since: cosmos-sdk 0.46 +*/ +export type V1Beta1MsgCancelUpgradeResponse = object; + +/** +* MsgSoftwareUpgradeResponse is the Msg/SoftwareUpgrade response type. + +Since: cosmos-sdk 0.46 +*/ +export type V1Beta1MsgSoftwareUpgradeResponse = object; + +/** + * Plan specifies information about a planned upgrade and when it should occur. + */ +export interface V1Beta1Plan { + /** + * Sets the name for the upgrade. This name will be used by the upgraded + * version of the software to apply any special "on-upgrade" commands during + * the first BeginBlock method after the upgrade is applied. It is also used + * to detect whether a software version can handle a given upgrade. If no + * upgrade handler with this name has been set in the software, it will be + * assumed that the software is out-of-date when the upgrade Time or Height is + * reached and the software will exit. + */ + name?: string; + + /** + * Deprecated: Time based upgrades have been deprecated. Time based upgrade logic + * has been removed from the SDK. + * If this field is not empty, an error will be thrown. + * @format date-time + */ + time?: string; + + /** + * The height at which the upgrade must be performed. + * @format int64 + */ + height?: string; + + /** + * Any application specific upgrade info to be included on-chain + * such as a git commit that validators could automatically upgrade to + */ + info?: string; + + /** + * Deprecated: UpgradedClientState field has been deprecated. IBC upgrade logic has been + * moved to the IBC module in the sub module 02-client. + * If this field is not empty, an error will be thrown. 
+ */ + upgraded_client_state?: ProtobufAny; +} + +/** +* QueryAppliedPlanResponse is the response type for the Query/AppliedPlan RPC +method. +*/ +export interface V1Beta1QueryAppliedPlanResponse { + /** + * height is the block height at which the plan was applied. + * @format int64 + */ + height?: string; +} + +/** + * Since: cosmos-sdk 0.46 + */ +export interface V1Beta1QueryAuthorityResponse { + address?: string; +} + +/** +* QueryCurrentPlanResponse is the response type for the Query/CurrentPlan RPC +method. +*/ +export interface V1Beta1QueryCurrentPlanResponse { + /** plan is the current upgrade plan. */ + plan?: V1Beta1Plan; +} + +/** +* QueryModuleVersionsResponse is the response type for the Query/ModuleVersions +RPC method. + +Since: cosmos-sdk 0.43 +*/ +export interface V1Beta1QueryModuleVersionsResponse { + /** module_versions is a list of module names with their consensus versions. */ + module_versions?: V1Beta1ModuleVersion[]; +} + +/** +* QueryUpgradedConsensusStateResponse is the response type for the Query/UpgradedConsensusState +RPC method. +*/ +export interface V1Beta1QueryUpgradedConsensusStateResponse { + /** + * Since: cosmos-sdk 0.43 + * @format byte + */ + upgraded_consensus_state?: string; +} + +import axios, { AxiosInstance, AxiosRequestConfig, AxiosResponse, ResponseType } from "axios"; + +export type QueryParamsType = Record; + +export interface FullRequestParams extends Omit { + /** set parameter to `true` for call `securityWorker` for this request */ + secure?: boolean; + /** request path */ + path: string; + /** content type of request body */ + type?: ContentType; + /** query params */ + query?: QueryParamsType; + /** format of response (i.e. 
response.json() -> format: "json") */ + format?: ResponseType; + /** request body */ + body?: unknown; +} + +export type RequestParams = Omit; + +export interface ApiConfig extends Omit { + securityWorker?: ( + securityData: SecurityDataType | null, + ) => Promise | AxiosRequestConfig | void; + secure?: boolean; + format?: ResponseType; +} + +export enum ContentType { + Json = "application/json", + FormData = "multipart/form-data", + UrlEncoded = "application/x-www-form-urlencoded", +} + +export class HttpClient { + public instance: AxiosInstance; + private securityData: SecurityDataType | null = null; + private securityWorker?: ApiConfig["securityWorker"]; + private secure?: boolean; + private format?: ResponseType; + + constructor({ securityWorker, secure, format, ...axiosConfig }: ApiConfig = {}) { + this.instance = axios.create({ ...axiosConfig, baseURL: axiosConfig.baseURL || "" }); + this.secure = secure; + this.format = format; + this.securityWorker = securityWorker; + } + + public setSecurityData = (data: SecurityDataType | null) => { + this.securityData = data; + }; + + private mergeRequestParams(params1: AxiosRequestConfig, params2?: AxiosRequestConfig): AxiosRequestConfig { + return { + ...this.instance.defaults, + ...params1, + ...(params2 || {}), + headers: { + ...(this.instance.defaults.headers || {}), + ...(params1.headers || {}), + ...((params2 && params2.headers) || {}), + }, + }; + } + + private createFormData(input: Record): FormData { + return Object.keys(input || {}).reduce((formData, key) => { + const property = input[key]; + formData.append( + key, + property instanceof Blob + ? property + : typeof property === "object" && property !== null + ? JSON.stringify(property) + : `${property}`, + ); + return formData; + }, new FormData()); + } + + public request = async ({ + secure, + path, + type, + query, + format, + body, + ...params + }: FullRequestParams): Promise> => { + const secureParams = + ((typeof secure === "boolean" ? 
secure : this.secure) && + this.securityWorker && + (await this.securityWorker(this.securityData))) || + {}; + const requestParams = this.mergeRequestParams(params, secureParams); + const responseFormat = (format && this.format) || void 0; + + if (type === ContentType.FormData && body && body !== null && typeof body === "object") { + requestParams.headers.common = { Accept: "*/*" }; + requestParams.headers.post = {}; + requestParams.headers.put = {}; + + body = this.createFormData(body as Record); + } + + return this.instance.request({ + ...requestParams, + headers: { + ...(type && type !== ContentType.FormData ? { "Content-Type": type } : {}), + ...(requestParams.headers || {}), + }, + params: query, + responseType: responseFormat, + data: body, + url: path, + }); + }; +} + +/** + * @title cosmos/upgrade/v1beta1/query.proto + * @version version not set + */ +export class Api extends HttpClient { + /** + * No description + * + * @tags Query + * @name QueryAppliedPlan + * @summary AppliedPlan queries a previously applied upgrade plan by its name. + * @request GET:/cosmos/upgrade/v1beta1/applied_plan/{name} + */ + queryAppliedPlan = (name: string, params: RequestParams = {}) => + this.request({ + path: `/cosmos/upgrade/v1beta1/applied_plan/${name}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * @description Since: cosmos-sdk 0.46 + * + * @tags Query + * @name QueryAuthority + * @summary Returns the account with authority to conduct upgrades + * @request GET:/cosmos/upgrade/v1beta1/authority + */ + queryAuthority = (params: RequestParams = {}) => + this.request({ + path: `/cosmos/upgrade/v1beta1/authority`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryCurrentPlan + * @summary CurrentPlan queries the current upgrade plan. 
+ * @request GET:/cosmos/upgrade/v1beta1/current_plan + */ + queryCurrentPlan = (params: RequestParams = {}) => + this.request({ + path: `/cosmos/upgrade/v1beta1/current_plan`, + method: "GET", + format: "json", + ...params, + }); + + /** + * @description Since: cosmos-sdk 0.43 + * + * @tags Query + * @name QueryModuleVersions + * @summary ModuleVersions queries the list of module versions from state. + * @request GET:/cosmos/upgrade/v1beta1/module_versions + */ + queryModuleVersions = (query?: { module_name?: string }, params: RequestParams = {}) => + this.request({ + path: `/cosmos/upgrade/v1beta1/module_versions`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryUpgradedConsensusState + * @summary UpgradedConsensusState queries the consensus state that will serve +as a trusted kernel for the next version of this chain. It will only be +stored at the last height of this chain. +UpgradedConsensusState RPC not supported with legacy querier +This rpc is deprecated now that IBC has its own replacement +(https://github.com/cosmos/ibc-go/blob/2c880a22e9f9cc75f62b527ca94aa75ce1106001/proto/ibc/core/client/v1/query.proto#L54) + * @request GET:/cosmos/upgrade/v1beta1/upgraded_consensus_state/{last_height} + */ + queryUpgradedConsensusState = (lastHeight: string, params: RequestParams = {}) => + this.request({ + path: `/cosmos/upgrade/v1beta1/upgraded_consensus_state/${lastHeight}`, + method: "GET", + format: "json", + ...params, + }); +} diff --git a/ts-client/cosmos.upgrade.v1beta1/types.ts b/ts-client/cosmos.upgrade.v1beta1/types.ts new file mode 100755 index 000000000..2e7c435ed --- /dev/null +++ b/ts-client/cosmos.upgrade.v1beta1/types.ts @@ -0,0 +1,13 @@ +import { Plan } from "./types/cosmos/upgrade/v1beta1/upgrade" +import { SoftwareUpgradeProposal } from "./types/cosmos/upgrade/v1beta1/upgrade" +import { CancelSoftwareUpgradeProposal } from "./types/cosmos/upgrade/v1beta1/upgrade" 
+import { ModuleVersion } from "./types/cosmos/upgrade/v1beta1/upgrade" + + +export { + Plan, + SoftwareUpgradeProposal, + CancelSoftwareUpgradeProposal, + ModuleVersion, + + } \ No newline at end of file diff --git a/ts-client/cosmos.upgrade.v1beta1/types/amino/amino.ts b/ts-client/cosmos.upgrade.v1beta1/types/amino/amino.ts new file mode 100644 index 000000000..4b67dcfe4 --- /dev/null +++ b/ts-client/cosmos.upgrade.v1beta1/types/amino/amino.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "amino"; diff --git a/ts-client/cosmos.upgrade.v1beta1/types/cosmos/msg/v1/msg.ts b/ts-client/cosmos.upgrade.v1beta1/types/cosmos/msg/v1/msg.ts new file mode 100644 index 000000000..f0ff44eb2 --- /dev/null +++ b/ts-client/cosmos.upgrade.v1beta1/types/cosmos/msg/v1/msg.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "cosmos.msg.v1"; diff --git a/ts-client/cosmos.upgrade.v1beta1/types/cosmos/upgrade/v1beta1/query.ts b/ts-client/cosmos.upgrade.v1beta1/types/cosmos/upgrade/v1beta1/query.ts new file mode 100644 index 000000000..1f8eedff9 --- /dev/null +++ b/ts-client/cosmos.upgrade.v1beta1/types/cosmos/upgrade/v1beta1/query.ts @@ -0,0 +1,728 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { ModuleVersion, Plan } from "./upgrade"; + +export const protobufPackage = "cosmos.upgrade.v1beta1"; + +/** + * QueryCurrentPlanRequest is the request type for the Query/CurrentPlan RPC + * method. + */ +export interface QueryCurrentPlanRequest { +} + +/** + * QueryCurrentPlanResponse is the response type for the Query/CurrentPlan RPC + * method. + */ +export interface QueryCurrentPlanResponse { + /** plan is the current upgrade plan. */ + plan: Plan | undefined; +} + +/** + * QueryCurrentPlanRequest is the request type for the Query/AppliedPlan RPC + * method. + */ +export interface QueryAppliedPlanRequest { + /** name is the name of the applied plan to query for. 
*/ + name: string; +} + +/** + * QueryAppliedPlanResponse is the response type for the Query/AppliedPlan RPC + * method. + */ +export interface QueryAppliedPlanResponse { + /** height is the block height at which the plan was applied. */ + height: number; +} + +/** + * QueryUpgradedConsensusStateRequest is the request type for the Query/UpgradedConsensusState + * RPC method. + * + * @deprecated + */ +export interface QueryUpgradedConsensusStateRequest { + /** + * last height of the current chain must be sent in request + * as this is the height under which next consensus state is stored + */ + lastHeight: number; +} + +/** + * QueryUpgradedConsensusStateResponse is the response type for the Query/UpgradedConsensusState + * RPC method. + * + * @deprecated + */ +export interface QueryUpgradedConsensusStateResponse { + /** Since: cosmos-sdk 0.43 */ + upgradedConsensusState: Uint8Array; +} + +/** + * QueryModuleVersionsRequest is the request type for the Query/ModuleVersions + * RPC method. + * + * Since: cosmos-sdk 0.43 + */ +export interface QueryModuleVersionsRequest { + /** + * module_name is a field to query a specific module + * consensus version from state. Leaving this empty will + * fetch the full list of module versions from state + */ + moduleName: string; +} + +/** + * QueryModuleVersionsResponse is the response type for the Query/ModuleVersions + * RPC method. + * + * Since: cosmos-sdk 0.43 + */ +export interface QueryModuleVersionsResponse { + /** module_versions is a list of module names with their consensus versions. 
*/ + moduleVersions: ModuleVersion[]; +} + +/** + * QueryAuthorityRequest is the request type for Query/Authority + * + * Since: cosmos-sdk 0.46 + */ +export interface QueryAuthorityRequest { +} + +/** + * QueryAuthorityResponse is the response type for Query/Authority + * + * Since: cosmos-sdk 0.46 + */ +export interface QueryAuthorityResponse { + address: string; +} + +function createBaseQueryCurrentPlanRequest(): QueryCurrentPlanRequest { + return {}; +} + +export const QueryCurrentPlanRequest = { + encode(_: QueryCurrentPlanRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryCurrentPlanRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryCurrentPlanRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): QueryCurrentPlanRequest { + return {}; + }, + + toJSON(_: QueryCurrentPlanRequest): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): QueryCurrentPlanRequest { + const message = createBaseQueryCurrentPlanRequest(); + return message; + }, +}; + +function createBaseQueryCurrentPlanResponse(): QueryCurrentPlanResponse { + return { plan: undefined }; +} + +export const QueryCurrentPlanResponse = { + encode(message: QueryCurrentPlanResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.plan !== undefined) { + Plan.encode(message.plan, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryCurrentPlanResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryCurrentPlanResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.plan = Plan.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryCurrentPlanResponse { + return { plan: isSet(object.plan) ? Plan.fromJSON(object.plan) : undefined }; + }, + + toJSON(message: QueryCurrentPlanResponse): unknown { + const obj: any = {}; + message.plan !== undefined && (obj.plan = message.plan ? Plan.toJSON(message.plan) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryCurrentPlanResponse { + const message = createBaseQueryCurrentPlanResponse(); + message.plan = (object.plan !== undefined && object.plan !== null) ? Plan.fromPartial(object.plan) : undefined; + return message; + }, +}; + +function createBaseQueryAppliedPlanRequest(): QueryAppliedPlanRequest { + return { name: "" }; +} + +export const QueryAppliedPlanRequest = { + encode(message: QueryAppliedPlanRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAppliedPlanRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAppliedPlanRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAppliedPlanRequest { + return { name: isSet(object.name) ? 
String(object.name) : "" }; + }, + + toJSON(message: QueryAppliedPlanRequest): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + return obj; + }, + + fromPartial, I>>(object: I): QueryAppliedPlanRequest { + const message = createBaseQueryAppliedPlanRequest(); + message.name = object.name ?? ""; + return message; + }, +}; + +function createBaseQueryAppliedPlanResponse(): QueryAppliedPlanResponse { + return { height: 0 }; +} + +export const QueryAppliedPlanResponse = { + encode(message: QueryAppliedPlanResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.height !== 0) { + writer.uint32(8).int64(message.height); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAppliedPlanResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAppliedPlanResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.height = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAppliedPlanResponse { + return { height: isSet(object.height) ? Number(object.height) : 0 }; + }, + + toJSON(message: QueryAppliedPlanResponse): unknown { + const obj: any = {}; + message.height !== undefined && (obj.height = Math.round(message.height)); + return obj; + }, + + fromPartial, I>>(object: I): QueryAppliedPlanResponse { + const message = createBaseQueryAppliedPlanResponse(); + message.height = object.height ?? 
0; + return message; + }, +}; + +function createBaseQueryUpgradedConsensusStateRequest(): QueryUpgradedConsensusStateRequest { + return { lastHeight: 0 }; +} + +export const QueryUpgradedConsensusStateRequest = { + encode(message: QueryUpgradedConsensusStateRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.lastHeight !== 0) { + writer.uint32(8).int64(message.lastHeight); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUpgradedConsensusStateRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryUpgradedConsensusStateRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.lastHeight = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryUpgradedConsensusStateRequest { + return { lastHeight: isSet(object.lastHeight) ? Number(object.lastHeight) : 0 }; + }, + + toJSON(message: QueryUpgradedConsensusStateRequest): unknown { + const obj: any = {}; + message.lastHeight !== undefined && (obj.lastHeight = Math.round(message.lastHeight)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryUpgradedConsensusStateRequest { + const message = createBaseQueryUpgradedConsensusStateRequest(); + message.lastHeight = object.lastHeight ?? 
0; + return message; + }, +}; + +function createBaseQueryUpgradedConsensusStateResponse(): QueryUpgradedConsensusStateResponse { + return { upgradedConsensusState: new Uint8Array() }; +} + +export const QueryUpgradedConsensusStateResponse = { + encode(message: QueryUpgradedConsensusStateResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.upgradedConsensusState.length !== 0) { + writer.uint32(18).bytes(message.upgradedConsensusState); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryUpgradedConsensusStateResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryUpgradedConsensusStateResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.upgradedConsensusState = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryUpgradedConsensusStateResponse { + return { + upgradedConsensusState: isSet(object.upgradedConsensusState) + ? bytesFromBase64(object.upgradedConsensusState) + : new Uint8Array(), + }; + }, + + toJSON(message: QueryUpgradedConsensusStateResponse): unknown { + const obj: any = {}; + message.upgradedConsensusState !== undefined + && (obj.upgradedConsensusState = base64FromBytes( + message.upgradedConsensusState !== undefined ? message.upgradedConsensusState : new Uint8Array(), + )); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryUpgradedConsensusStateResponse { + const message = createBaseQueryUpgradedConsensusStateResponse(); + message.upgradedConsensusState = object.upgradedConsensusState ?? 
new Uint8Array(); + return message; + }, +}; + +function createBaseQueryModuleVersionsRequest(): QueryModuleVersionsRequest { + return { moduleName: "" }; +} + +export const QueryModuleVersionsRequest = { + encode(message: QueryModuleVersionsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.moduleName !== "") { + writer.uint32(10).string(message.moduleName); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryModuleVersionsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryModuleVersionsRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.moduleName = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryModuleVersionsRequest { + return { moduleName: isSet(object.moduleName) ? String(object.moduleName) : "" }; + }, + + toJSON(message: QueryModuleVersionsRequest): unknown { + const obj: any = {}; + message.moduleName !== undefined && (obj.moduleName = message.moduleName); + return obj; + }, + + fromPartial, I>>(object: I): QueryModuleVersionsRequest { + const message = createBaseQueryModuleVersionsRequest(); + message.moduleName = object.moduleName ?? ""; + return message; + }, +}; + +function createBaseQueryModuleVersionsResponse(): QueryModuleVersionsResponse { + return { moduleVersions: [] }; +} + +export const QueryModuleVersionsResponse = { + encode(message: QueryModuleVersionsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.moduleVersions) { + ModuleVersion.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryModuleVersionsResponse { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryModuleVersionsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.moduleVersions.push(ModuleVersion.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryModuleVersionsResponse { + return { + moduleVersions: Array.isArray(object?.moduleVersions) + ? object.moduleVersions.map((e: any) => ModuleVersion.fromJSON(e)) + : [], + }; + }, + + toJSON(message: QueryModuleVersionsResponse): unknown { + const obj: any = {}; + if (message.moduleVersions) { + obj.moduleVersions = message.moduleVersions.map((e) => e ? ModuleVersion.toJSON(e) : undefined); + } else { + obj.moduleVersions = []; + } + return obj; + }, + + fromPartial, I>>(object: I): QueryModuleVersionsResponse { + const message = createBaseQueryModuleVersionsResponse(); + message.moduleVersions = object.moduleVersions?.map((e) => ModuleVersion.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseQueryAuthorityRequest(): QueryAuthorityRequest { + return {}; +} + +export const QueryAuthorityRequest = { + encode(_: QueryAuthorityRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAuthorityRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAuthorityRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): QueryAuthorityRequest { + return {}; + }, + + toJSON(_: QueryAuthorityRequest): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): QueryAuthorityRequest { + const message = createBaseQueryAuthorityRequest(); + return message; + }, +}; + +function createBaseQueryAuthorityResponse(): QueryAuthorityResponse { + return { address: "" }; +} + +export const QueryAuthorityResponse = { + encode(message: QueryAuthorityResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAuthorityResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAuthorityResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAuthorityResponse { + return { address: isSet(object.address) ? String(object.address) : "" }; + }, + + toJSON(message: QueryAuthorityResponse): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + return obj; + }, + + fromPartial, I>>(object: I): QueryAuthorityResponse { + const message = createBaseQueryAuthorityResponse(); + message.address = object.address ?? ""; + return message; + }, +}; + +/** Query defines the gRPC upgrade querier service. */ +export interface Query { + /** CurrentPlan queries the current upgrade plan. 
*/ + CurrentPlan(request: QueryCurrentPlanRequest): Promise; + /** AppliedPlan queries a previously applied upgrade plan by its name. */ + AppliedPlan(request: QueryAppliedPlanRequest): Promise; + /** + * UpgradedConsensusState queries the consensus state that will serve + * as a trusted kernel for the next version of this chain. It will only be + * stored at the last height of this chain. + * UpgradedConsensusState RPC not supported with legacy querier + * This rpc is deprecated now that IBC has its own replacement + * (https://github.com/cosmos/ibc-go/blob/2c880a22e9f9cc75f62b527ca94aa75ce1106001/proto/ibc/core/client/v1/query.proto#L54) + * + * @deprecated + */ + UpgradedConsensusState(request: QueryUpgradedConsensusStateRequest): Promise; + /** + * ModuleVersions queries the list of module versions from state. + * + * Since: cosmos-sdk 0.43 + */ + ModuleVersions(request: QueryModuleVersionsRequest): Promise; + /** + * Returns the account with authority to conduct upgrades + * + * Since: cosmos-sdk 0.46 + */ + Authority(request: QueryAuthorityRequest): Promise; +} + +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.CurrentPlan = this.CurrentPlan.bind(this); + this.AppliedPlan = this.AppliedPlan.bind(this); + this.UpgradedConsensusState = this.UpgradedConsensusState.bind(this); + this.ModuleVersions = this.ModuleVersions.bind(this); + this.Authority = this.Authority.bind(this); + } + CurrentPlan(request: QueryCurrentPlanRequest): Promise { + const data = QueryCurrentPlanRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.upgrade.v1beta1.Query", "CurrentPlan", data); + return promise.then((data) => QueryCurrentPlanResponse.decode(new _m0.Reader(data))); + } + + AppliedPlan(request: QueryAppliedPlanRequest): Promise { + const data = QueryAppliedPlanRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.upgrade.v1beta1.Query", "AppliedPlan", 
data); + return promise.then((data) => QueryAppliedPlanResponse.decode(new _m0.Reader(data))); + } + + UpgradedConsensusState(request: QueryUpgradedConsensusStateRequest): Promise { + const data = QueryUpgradedConsensusStateRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.upgrade.v1beta1.Query", "UpgradedConsensusState", data); + return promise.then((data) => QueryUpgradedConsensusStateResponse.decode(new _m0.Reader(data))); + } + + ModuleVersions(request: QueryModuleVersionsRequest): Promise { + const data = QueryModuleVersionsRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.upgrade.v1beta1.Query", "ModuleVersions", data); + return promise.then((data) => QueryModuleVersionsResponse.decode(new _m0.Reader(data))); + } + + Authority(request: QueryAuthorityRequest): Promise { + const data = QueryAuthorityRequest.encode(request).finish(); + const promise = this.rpc.request("cosmos.upgrade.v1beta1.Query", "Authority", data); + return promise.then((data) => QueryAuthorityResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string 
{ + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.upgrade.v1beta1/types/cosmos/upgrade/v1beta1/tx.ts b/ts-client/cosmos.upgrade.v1beta1/types/cosmos/upgrade/v1beta1/tx.ts new file mode 100644 index 000000000..78d08a654 --- /dev/null +++ b/ts-client/cosmos.upgrade.v1beta1/types/cosmos/upgrade/v1beta1/tx.ts @@ -0,0 +1,284 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Plan } from "./upgrade"; + +export const protobufPackage = "cosmos.upgrade.v1beta1"; + +/** Since: cosmos-sdk 0.46 */ + +/** + * MsgSoftwareUpgrade is the Msg/SoftwareUpgrade request type. + * + * Since: cosmos-sdk 0.46 + */ +export interface MsgSoftwareUpgrade { + /** authority is the address that controls the module (defaults to x/gov unless overwritten). */ + authority: string; + /** plan is the upgrade plan. */ + plan: Plan | undefined; +} + +/** + * MsgSoftwareUpgradeResponse is the Msg/SoftwareUpgrade response type. 
+ * + * Since: cosmos-sdk 0.46 + */ +export interface MsgSoftwareUpgradeResponse { +} + +/** + * MsgCancelUpgrade is the Msg/CancelUpgrade request type. + * + * Since: cosmos-sdk 0.46 + */ +export interface MsgCancelUpgrade { + /** authority is the address that controls the module (defaults to x/gov unless overwritten). */ + authority: string; +} + +/** + * MsgCancelUpgradeResponse is the Msg/CancelUpgrade response type. + * + * Since: cosmos-sdk 0.46 + */ +export interface MsgCancelUpgradeResponse { +} + +function createBaseMsgSoftwareUpgrade(): MsgSoftwareUpgrade { + return { authority: "", plan: undefined }; +} + +export const MsgSoftwareUpgrade = { + encode(message: MsgSoftwareUpgrade, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + if (message.plan !== undefined) { + Plan.encode(message.plan, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSoftwareUpgrade { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSoftwareUpgrade(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + case 2: + message.plan = Plan.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgSoftwareUpgrade { + return { + authority: isSet(object.authority) ? String(object.authority) : "", + plan: isSet(object.plan) ? Plan.fromJSON(object.plan) : undefined, + }; + }, + + toJSON(message: MsgSoftwareUpgrade): unknown { + const obj: any = {}; + message.authority !== undefined && (obj.authority = message.authority); + message.plan !== undefined && (obj.plan = message.plan ? 
Plan.toJSON(message.plan) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): MsgSoftwareUpgrade { + const message = createBaseMsgSoftwareUpgrade(); + message.authority = object.authority ?? ""; + message.plan = (object.plan !== undefined && object.plan !== null) ? Plan.fromPartial(object.plan) : undefined; + return message; + }, +}; + +function createBaseMsgSoftwareUpgradeResponse(): MsgSoftwareUpgradeResponse { + return {}; +} + +export const MsgSoftwareUpgradeResponse = { + encode(_: MsgSoftwareUpgradeResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgSoftwareUpgradeResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSoftwareUpgradeResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgSoftwareUpgradeResponse { + return {}; + }, + + toJSON(_: MsgSoftwareUpgradeResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgSoftwareUpgradeResponse { + const message = createBaseMsgSoftwareUpgradeResponse(); + return message; + }, +}; + +function createBaseMsgCancelUpgrade(): MsgCancelUpgrade { + return { authority: "" }; +} + +export const MsgCancelUpgrade = { + encode(message: MsgCancelUpgrade, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.authority !== "") { + writer.uint32(10).string(message.authority); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCancelUpgrade { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgCancelUpgrade(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.authority = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgCancelUpgrade { + return { authority: isSet(object.authority) ? String(object.authority) : "" }; + }, + + toJSON(message: MsgCancelUpgrade): unknown { + const obj: any = {}; + message.authority !== undefined && (obj.authority = message.authority); + return obj; + }, + + fromPartial, I>>(object: I): MsgCancelUpgrade { + const message = createBaseMsgCancelUpgrade(); + message.authority = object.authority ?? ""; + return message; + }, +}; + +function createBaseMsgCancelUpgradeResponse(): MsgCancelUpgradeResponse { + return {}; +} + +export const MsgCancelUpgradeResponse = { + encode(_: MsgCancelUpgradeResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCancelUpgradeResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCancelUpgradeResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgCancelUpgradeResponse { + return {}; + }, + + toJSON(_: MsgCancelUpgradeResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgCancelUpgradeResponse { + const message = createBaseMsgCancelUpgradeResponse(); + return message; + }, +}; + +/** Msg defines the upgrade Msg service. */ +export interface Msg { + /** + * SoftwareUpgrade is a governance operation for initiating a software upgrade. 
+ * + * Since: cosmos-sdk 0.46 + */ + SoftwareUpgrade(request: MsgSoftwareUpgrade): Promise; + /** + * CancelUpgrade is a governance operation for cancelling a previously + * approved software upgrade. + * + * Since: cosmos-sdk 0.46 + */ + CancelUpgrade(request: MsgCancelUpgrade): Promise; +} + +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.SoftwareUpgrade = this.SoftwareUpgrade.bind(this); + this.CancelUpgrade = this.CancelUpgrade.bind(this); + } + SoftwareUpgrade(request: MsgSoftwareUpgrade): Promise { + const data = MsgSoftwareUpgrade.encode(request).finish(); + const promise = this.rpc.request("cosmos.upgrade.v1beta1.Msg", "SoftwareUpgrade", data); + return promise.then((data) => MsgSoftwareUpgradeResponse.decode(new _m0.Reader(data))); + } + + CancelUpgrade(request: MsgCancelUpgrade): Promise { + const data = MsgCancelUpgrade.encode(request).finish(); + const promise = this.rpc.request("cosmos.upgrade.v1beta1.Msg", "CancelUpgrade", data); + return promise.then((data) => MsgCancelUpgradeResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.upgrade.v1beta1/types/cosmos/upgrade/v1beta1/upgrade.ts b/ts-client/cosmos.upgrade.v1beta1/types/cosmos/upgrade/v1beta1/upgrade.ts new file mode 100644 index 000000000..e578e0a38 --- /dev/null +++ b/ts-client/cosmos.upgrade.v1beta1/types/cosmos/upgrade/v1beta1/upgrade.ts @@ -0,0 +1,431 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { Any } from "../../../google/protobuf/any"; +import { Timestamp } from "../../../google/protobuf/timestamp"; + +export const protobufPackage = "cosmos.upgrade.v1beta1"; + +/** Plan specifies information about a planned upgrade and when it should occur. */ +export interface Plan { + /** + * Sets the name for the upgrade. This name will be used by the upgraded + * version of the software to apply any special "on-upgrade" commands during + * the first BeginBlock method after the upgrade is applied. It is also used + * to detect whether a software version can handle a given upgrade. If no + * upgrade handler with this name has been set in the software, it will be + * assumed that the software is out-of-date when the upgrade Time or Height is + * reached and the software will exit. + */ + name: string; + /** + * Deprecated: Time based upgrades have been deprecated. Time based upgrade logic + * has been removed from the SDK. + * If this field is not empty, an error will be thrown. + * + * @deprecated + */ + time: + | Date + | undefined; + /** The height at which the upgrade must be performed. */ + height: number; + /** + * Any application specific upgrade info to be included on-chain + * such as a git commit that validators could automatically upgrade to + */ + info: string; + /** + * Deprecated: UpgradedClientState field has been deprecated. 
IBC upgrade logic has been + * moved to the IBC module in the sub module 02-client. + * If this field is not empty, an error will be thrown. + * + * @deprecated + */ + upgradedClientState: Any | undefined; +} + +/** + * SoftwareUpgradeProposal is a gov Content type for initiating a software + * upgrade. + * Deprecated: This legacy proposal is deprecated in favor of Msg-based gov + * proposals, see MsgSoftwareUpgrade. + * + * @deprecated + */ +export interface SoftwareUpgradeProposal { + /** title of the proposal */ + title: string; + /** description of the proposal */ + description: string; + /** plan of the proposal */ + plan: Plan | undefined; +} + +/** + * CancelSoftwareUpgradeProposal is a gov Content type for cancelling a software + * upgrade. + * Deprecated: This legacy proposal is deprecated in favor of Msg-based gov + * proposals, see MsgCancelUpgrade. + * + * @deprecated + */ +export interface CancelSoftwareUpgradeProposal { + /** title of the proposal */ + title: string; + /** description of the proposal */ + description: string; +} + +/** + * ModuleVersion specifies a module and its consensus version. 
+ * + * Since: cosmos-sdk 0.43 + */ +export interface ModuleVersion { + /** name of the app module */ + name: string; + /** consensus version of the app module */ + version: number; +} + +function createBasePlan(): Plan { + return { name: "", time: undefined, height: 0, info: "", upgradedClientState: undefined }; +} + +export const Plan = { + encode(message: Plan, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.time !== undefined) { + Timestamp.encode(toTimestamp(message.time), writer.uint32(18).fork()).ldelim(); + } + if (message.height !== 0) { + writer.uint32(24).int64(message.height); + } + if (message.info !== "") { + writer.uint32(34).string(message.info); + } + if (message.upgradedClientState !== undefined) { + Any.encode(message.upgradedClientState, writer.uint32(42).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Plan { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePlan(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.time = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + case 3: + message.height = longToNumber(reader.int64() as Long); + break; + case 4: + message.info = reader.string(); + break; + case 5: + message.upgradedClientState = Any.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Plan { + return { + name: isSet(object.name) ? String(object.name) : "", + time: isSet(object.time) ? fromJsonTimestamp(object.time) : undefined, + height: isSet(object.height) ? Number(object.height) : 0, + info: isSet(object.info) ? 
String(object.info) : "", + upgradedClientState: isSet(object.upgradedClientState) ? Any.fromJSON(object.upgradedClientState) : undefined, + }; + }, + + toJSON(message: Plan): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.time !== undefined && (obj.time = message.time.toISOString()); + message.height !== undefined && (obj.height = Math.round(message.height)); + message.info !== undefined && (obj.info = message.info); + message.upgradedClientState !== undefined + && (obj.upgradedClientState = message.upgradedClientState ? Any.toJSON(message.upgradedClientState) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): Plan { + const message = createBasePlan(); + message.name = object.name ?? ""; + message.time = object.time ?? undefined; + message.height = object.height ?? 0; + message.info = object.info ?? ""; + message.upgradedClientState = (object.upgradedClientState !== undefined && object.upgradedClientState !== null) + ? Any.fromPartial(object.upgradedClientState) + : undefined; + return message; + }, +}; + +function createBaseSoftwareUpgradeProposal(): SoftwareUpgradeProposal { + return { title: "", description: "", plan: undefined }; +} + +export const SoftwareUpgradeProposal = { + encode(message: SoftwareUpgradeProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + if (message.plan !== undefined) { + Plan.encode(message.plan, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SoftwareUpgradeProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSoftwareUpgradeProposal(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + case 3: + message.plan = Plan.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SoftwareUpgradeProposal { + return { + title: isSet(object.title) ? String(object.title) : "", + description: isSet(object.description) ? String(object.description) : "", + plan: isSet(object.plan) ? Plan.fromJSON(object.plan) : undefined, + }; + }, + + toJSON(message: SoftwareUpgradeProposal): unknown { + const obj: any = {}; + message.title !== undefined && (obj.title = message.title); + message.description !== undefined && (obj.description = message.description); + message.plan !== undefined && (obj.plan = message.plan ? Plan.toJSON(message.plan) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): SoftwareUpgradeProposal { + const message = createBaseSoftwareUpgradeProposal(); + message.title = object.title ?? ""; + message.description = object.description ?? ""; + message.plan = (object.plan !== undefined && object.plan !== null) ? 
Plan.fromPartial(object.plan) : undefined; + return message; + }, +}; + +function createBaseCancelSoftwareUpgradeProposal(): CancelSoftwareUpgradeProposal { + return { title: "", description: "" }; +} + +export const CancelSoftwareUpgradeProposal = { + encode(message: CancelSoftwareUpgradeProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CancelSoftwareUpgradeProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCancelSoftwareUpgradeProposal(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): CancelSoftwareUpgradeProposal { + return { + title: isSet(object.title) ? String(object.title) : "", + description: isSet(object.description) ? String(object.description) : "", + }; + }, + + toJSON(message: CancelSoftwareUpgradeProposal): unknown { + const obj: any = {}; + message.title !== undefined && (obj.title = message.title); + message.description !== undefined && (obj.description = message.description); + return obj; + }, + + fromPartial, I>>( + object: I, + ): CancelSoftwareUpgradeProposal { + const message = createBaseCancelSoftwareUpgradeProposal(); + message.title = object.title ?? ""; + message.description = object.description ?? 
""; + return message; + }, +}; + +function createBaseModuleVersion(): ModuleVersion { + return { name: "", version: 0 }; +} + +export const ModuleVersion = { + encode(message: ModuleVersion, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.version !== 0) { + writer.uint32(16).uint64(message.version); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ModuleVersion { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseModuleVersion(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.version = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ModuleVersion { + return { + name: isSet(object.name) ? String(object.name) : "", + version: isSet(object.version) ? Number(object.version) : 0, + }; + }, + + toJSON(message: ModuleVersion): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.version !== undefined && (obj.version = Math.round(message.version)); + return obj; + }, + + fromPartial, I>>(object: I): ModuleVersion { + const message = createBaseModuleVersion(); + message.name = object.name ?? ""; + message.version = object.version ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function toTimestamp(date: Date): Timestamp { + const seconds = date.getTime() / 1_000; + const nanos = (date.getTime() % 1_000) * 1_000_000; + return { seconds, nanos }; +} + +function fromTimestamp(t: Timestamp): Date { + let millis = t.seconds * 1_000; + millis += t.nanos / 1_000_000; + return new Date(millis); +} + +function fromJsonTimestamp(o: any): Date { + if (o instanceof Date) { + return o; + } else if (typeof o === "string") { + return new Date(o); + } else { + return fromTimestamp(Timestamp.fromJSON(o)); + } +} + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.upgrade.v1beta1/types/cosmos_proto/cosmos.ts b/ts-client/cosmos.upgrade.v1beta1/types/cosmos_proto/cosmos.ts new file mode 100644 index 
000000000..79c8d3486 --- /dev/null +++ b/ts-client/cosmos.upgrade.v1beta1/types/cosmos_proto/cosmos.ts @@ -0,0 +1,247 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos_proto"; + +export enum ScalarType { + SCALAR_TYPE_UNSPECIFIED = 0, + SCALAR_TYPE_STRING = 1, + SCALAR_TYPE_BYTES = 2, + UNRECOGNIZED = -1, +} + +export function scalarTypeFromJSON(object: any): ScalarType { + switch (object) { + case 0: + case "SCALAR_TYPE_UNSPECIFIED": + return ScalarType.SCALAR_TYPE_UNSPECIFIED; + case 1: + case "SCALAR_TYPE_STRING": + return ScalarType.SCALAR_TYPE_STRING; + case 2: + case "SCALAR_TYPE_BYTES": + return ScalarType.SCALAR_TYPE_BYTES; + case -1: + case "UNRECOGNIZED": + default: + return ScalarType.UNRECOGNIZED; + } +} + +export function scalarTypeToJSON(object: ScalarType): string { + switch (object) { + case ScalarType.SCALAR_TYPE_UNSPECIFIED: + return "SCALAR_TYPE_UNSPECIFIED"; + case ScalarType.SCALAR_TYPE_STRING: + return "SCALAR_TYPE_STRING"; + case ScalarType.SCALAR_TYPE_BYTES: + return "SCALAR_TYPE_BYTES"; + case ScalarType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * InterfaceDescriptor describes an interface type to be used with + * accepts_interface and implements_interface and declared by declare_interface. + */ +export interface InterfaceDescriptor { + /** + * name is the name of the interface. It should be a short-name (without + * a period) such that the fully qualified name of the interface will be + * package.name, ex. for the package a.b and interface named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the interface and its + * purpose. + */ + description: string; +} + +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. 
+ * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. + * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ +export interface ScalarDescriptor { + /** + * name is the name of the scalar. It should be a short-name (without + * a period) such that the fully qualified name of the scalar will be + * package.name, ex. for the package a.b and scalar named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the scalar and its + * encoding format. For instance a big integer or decimal scalar should + * specify precisely the expected encoding format. + */ + description: string; + /** + * field_type is the type of field with which this scalar can be used. + * Scalars can be used with one and only one type of field so that + * encoding standards and simple and clear. Currently only string and + * bytes fields are supported for scalars. + */ + fieldType: ScalarType[]; +} + +function createBaseInterfaceDescriptor(): InterfaceDescriptor { + return { name: "", description: "" }; +} + +export const InterfaceDescriptor = { + encode(message: InterfaceDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseInterfaceDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): InterfaceDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + }; + }, + + toJSON(message: InterfaceDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + return obj; + }, + + fromPartial, I>>(object: I): InterfaceDescriptor { + const message = createBaseInterfaceDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? ""; + return message; + }, +}; + +function createBaseScalarDescriptor(): ScalarDescriptor { + return { name: "", description: "", fieldType: [] }; +} + +export const ScalarDescriptor = { + encode(message: ScalarDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + writer.uint32(26).fork(); + for (const v of message.fieldType) { + writer.int32(v); + } + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ScalarDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseScalarDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.fieldType.push(reader.int32() as any); + } + } else { + message.fieldType.push(reader.int32() as any); + } + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ScalarDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + fieldType: Array.isArray(object?.fieldType) ? object.fieldType.map((e: any) => scalarTypeFromJSON(e)) : [], + }; + }, + + toJSON(message: ScalarDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + if (message.fieldType) { + obj.fieldType = message.fieldType.map((e) => scalarTypeToJSON(e)); + } else { + obj.fieldType = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ScalarDescriptor { + const message = createBaseScalarDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? ""; + message.fieldType = object.fieldType?.map((e) => e) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.upgrade.v1beta1/types/gogoproto/gogo.ts b/ts-client/cosmos.upgrade.v1beta1/types/gogoproto/gogo.ts new file mode 100644 index 000000000..3f41a047a --- /dev/null +++ b/ts-client/cosmos.upgrade.v1beta1/types/gogoproto/gogo.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "gogoproto"; diff --git a/ts-client/cosmos.upgrade.v1beta1/types/google/api/annotations.ts b/ts-client/cosmos.upgrade.v1beta1/types/google/api/annotations.ts new file mode 100644 index 000000000..aace4787f --- /dev/null +++ b/ts-client/cosmos.upgrade.v1beta1/types/google/api/annotations.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "google.api"; diff --git a/ts-client/cosmos.upgrade.v1beta1/types/google/api/http.ts b/ts-client/cosmos.upgrade.v1beta1/types/google/api/http.ts new file mode 100644 index 000000000..17e770d15 --- /dev/null +++ b/ts-client/cosmos.upgrade.v1beta1/types/google/api/http.ts @@ -0,0 +1,589 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.api"; + +/** + * Defines the HTTP configuration for an API service. It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface Http { + /** + * A list of HTTP configuration rules that apply to individual API methods. + * + * **NOTE:** All service configuration rules follow "last one wins" order. + */ + rules: HttpRule[]; + /** + * When set to true, URL path parmeters will be fully URI-decoded except in + * cases of single segment matches in reserved expansion, where "%2F" will be + * left encoded. + * + * The default behavior is to not decode RFC 6570 reserved characters in multi + * segment matches. 
+ */ + fullyDecodeReservedExpansion: boolean; +} + +/** + * `HttpRule` defines the mapping of an RPC method to one or more HTTP + * REST API methods. The mapping specifies how different portions of the RPC + * request message are mapped to URL path, URL query parameters, and + * HTTP request body. The mapping is typically specified as an + * `google.api.http` annotation on the RPC method, + * see "google/api/annotations.proto" for details. + * + * The mapping consists of a field specifying the path template and + * method kind. The path template can refer to fields in the request + * message, as in the example below which describes a REST GET + * operation on a resource collection of messages: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}/{sub.subfield}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * SubMessage sub = 2; // `sub.subfield` is url-mapped + * } + * message Message { + * string text = 1; // content of the resource + * } + * + * The same http annotation can alternatively be expressed inside the + * `GRPC API Configuration` YAML file. + * + * http: + * rules: + * - selector: .Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * This definition enables an automatic, bidrectional mapping of HTTP + * JSON to RPC. Example: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456/foo` | `GetMessage(message_id: "123456" sub: SubMessage(subfield: "foo"))` + * + * In general, not only fields but also field paths can be referenced + * from a path pattern. Fields mapped to the path pattern cannot be + * repeated and must have a primitive (non-message) type. + * + * Any fields in the request message which are not bound by the path + * pattern automatically become (optional) HTTP query + * parameters. 
Assume the following definition of the request message: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * int64 revision = 2; // becomes a parameter + * SubMessage sub = 3; // `sub.subfield` becomes a parameter + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456?revision=2&sub.subfield=foo` | `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: "foo"))` + * + * Note that fields which are mapped to HTTP parameters must have a + * primitive type or a repeated primitive type. Message types are not + * allowed. In the case of a repeated type, the parameter can be + * repeated in the URL, as in `...?param=A¶m=B`. + * + * For HTTP method kinds which allow a request body, the `body` field + * specifies the mapping. Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. 
This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * The following HTTP JSON to RPC mapping is enabled: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice of + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC + * mappings: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` + * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: "123456")` + * + * # Rules for HTTP mapping + * + * The rules for mapping HTTP path, query parameters, and body fields + * to the request message are as follows: + * + * 1. The `body` field specifies either `*` or a field path, or is + * omitted. If omitted, it indicates there is no HTTP request body. + * 2. 
Leaf fields (recursive expansion of nested messages in the + * request) can be classified into three types: + * (a) Matched in the URL template. + * (b) Covered by body (if body is `*`, everything except (a) fields; + * else everything under the body field) + * (c) All other fields. + * 3. URL query parameters found in the HTTP request are mapped to (c) fields. + * 4. Any body sent with an HTTP request can contain only (b) fields. + * + * The syntax of the path template is as follows: + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single path segment. The syntax `**` matches zero + * or more path segments, which must be the last part of the path except the + * `Verb`. The syntax `LITERAL` matches literal text in the path. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path, all characters + * except `[-_.~0-9a-zA-Z]` are percent-encoded. Such variables show up in the + * Discovery Document as `{var}`. + * + * If a variable contains one or more path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path, all + * characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. Such variables + * show up in the Discovery Document as `{+var}`. 
+ * + * NOTE: While the single segment variable matches the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 + * Simple String Expansion, the multi segment variable **does not** match + * RFC 6570 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. + * + * NOTE: the field paths in variables and in the `body` must not refer to + * repeated fields or map fields. + */ +export interface HttpRule { + /** + * Selects methods to which this rule applies. + * + * Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + */ + selector: string; + /** Used for listing and getting information about resources. */ + get: + | string + | undefined; + /** Used for updating a resource. */ + put: + | string + | undefined; + /** Used for creating a resource. */ + post: + | string + | undefined; + /** Used for deleting a resource. */ + delete: + | string + | undefined; + /** Used for updating a resource. */ + patch: + | string + | undefined; + /** + * The custom pattern is used for specifying an HTTP method that is not + * included in the `pattern` field, such as HEAD, or "*" to leave the + * HTTP method unspecified for this rule. The wild-card rule is useful + * for services that provide content to Web (HTML) clients. + */ + custom: + | CustomHttpPattern + | undefined; + /** + * The name of the request field whose value is mapped to the HTTP body, or + * `*` for mapping all fields not captured by the path pattern to the HTTP + * body. NOTE: the referred field must not be a repeated field and must be + * present at the top-level of request message type. + */ + body: string; + /** + * Optional. The name of the response field whose value is mapped to the HTTP + * body of response. Other response fields are ignored. When + * not set, the response message will be used as HTTP body of response. 
+ */ + responseBody: string; + /** + * Additional HTTP bindings for the selector. Nested bindings must + * not contain an `additional_bindings` field themselves (that is, + * the nesting may only be one level deep). + */ + additionalBindings: HttpRule[]; +} + +/** A custom pattern is used for defining custom HTTP verb. */ +export interface CustomHttpPattern { + /** The name of this custom HTTP verb. */ + kind: string; + /** The path matched by this custom verb. */ + path: string; +} + +function createBaseHttp(): Http { + return { rules: [], fullyDecodeReservedExpansion: false }; +} + +export const Http = { + encode(message: Http, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rules) { + HttpRule.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.fullyDecodeReservedExpansion === true) { + writer.uint32(16).bool(message.fullyDecodeReservedExpansion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Http { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rules.push(HttpRule.decode(reader, reader.uint32())); + break; + case 2: + message.fullyDecodeReservedExpansion = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Http { + return { + rules: Array.isArray(object?.rules) ? object.rules.map((e: any) => HttpRule.fromJSON(e)) : [], + fullyDecodeReservedExpansion: isSet(object.fullyDecodeReservedExpansion) + ? Boolean(object.fullyDecodeReservedExpansion) + : false, + }; + }, + + toJSON(message: Http): unknown { + const obj: any = {}; + if (message.rules) { + obj.rules = message.rules.map((e) => e ? 
HttpRule.toJSON(e) : undefined); + } else { + obj.rules = []; + } + message.fullyDecodeReservedExpansion !== undefined + && (obj.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion); + return obj; + }, + + fromPartial, I>>(object: I): Http { + const message = createBaseHttp(); + message.rules = object.rules?.map((e) => HttpRule.fromPartial(e)) || []; + message.fullyDecodeReservedExpansion = object.fullyDecodeReservedExpansion ?? false; + return message; + }, +}; + +function createBaseHttpRule(): HttpRule { + return { + selector: "", + get: undefined, + put: undefined, + post: undefined, + delete: undefined, + patch: undefined, + custom: undefined, + body: "", + responseBody: "", + additionalBindings: [], + }; +} + +export const HttpRule = { + encode(message: HttpRule, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.selector !== "") { + writer.uint32(10).string(message.selector); + } + if (message.get !== undefined) { + writer.uint32(18).string(message.get); + } + if (message.put !== undefined) { + writer.uint32(26).string(message.put); + } + if (message.post !== undefined) { + writer.uint32(34).string(message.post); + } + if (message.delete !== undefined) { + writer.uint32(42).string(message.delete); + } + if (message.patch !== undefined) { + writer.uint32(50).string(message.patch); + } + if (message.custom !== undefined) { + CustomHttpPattern.encode(message.custom, writer.uint32(66).fork()).ldelim(); + } + if (message.body !== "") { + writer.uint32(58).string(message.body); + } + if (message.responseBody !== "") { + writer.uint32(98).string(message.responseBody); + } + for (const v of message.additionalBindings) { + HttpRule.encode(v!, writer.uint32(90).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): HttpRule { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseHttpRule(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.selector = reader.string(); + break; + case 2: + message.get = reader.string(); + break; + case 3: + message.put = reader.string(); + break; + case 4: + message.post = reader.string(); + break; + case 5: + message.delete = reader.string(); + break; + case 6: + message.patch = reader.string(); + break; + case 8: + message.custom = CustomHttpPattern.decode(reader, reader.uint32()); + break; + case 7: + message.body = reader.string(); + break; + case 12: + message.responseBody = reader.string(); + break; + case 11: + message.additionalBindings.push(HttpRule.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): HttpRule { + return { + selector: isSet(object.selector) ? String(object.selector) : "", + get: isSet(object.get) ? String(object.get) : undefined, + put: isSet(object.put) ? String(object.put) : undefined, + post: isSet(object.post) ? String(object.post) : undefined, + delete: isSet(object.delete) ? String(object.delete) : undefined, + patch: isSet(object.patch) ? String(object.patch) : undefined, + custom: isSet(object.custom) ? CustomHttpPattern.fromJSON(object.custom) : undefined, + body: isSet(object.body) ? String(object.body) : "", + responseBody: isSet(object.responseBody) ? String(object.responseBody) : "", + additionalBindings: Array.isArray(object?.additionalBindings) + ? 
object.additionalBindings.map((e: any) => HttpRule.fromJSON(e)) + : [], + }; + }, + + toJSON(message: HttpRule): unknown { + const obj: any = {}; + message.selector !== undefined && (obj.selector = message.selector); + message.get !== undefined && (obj.get = message.get); + message.put !== undefined && (obj.put = message.put); + message.post !== undefined && (obj.post = message.post); + message.delete !== undefined && (obj.delete = message.delete); + message.patch !== undefined && (obj.patch = message.patch); + message.custom !== undefined + && (obj.custom = message.custom ? CustomHttpPattern.toJSON(message.custom) : undefined); + message.body !== undefined && (obj.body = message.body); + message.responseBody !== undefined && (obj.responseBody = message.responseBody); + if (message.additionalBindings) { + obj.additionalBindings = message.additionalBindings.map((e) => e ? HttpRule.toJSON(e) : undefined); + } else { + obj.additionalBindings = []; + } + return obj; + }, + + fromPartial, I>>(object: I): HttpRule { + const message = createBaseHttpRule(); + message.selector = object.selector ?? ""; + message.get = object.get ?? undefined; + message.put = object.put ?? undefined; + message.post = object.post ?? undefined; + message.delete = object.delete ?? undefined; + message.patch = object.patch ?? undefined; + message.custom = (object.custom !== undefined && object.custom !== null) + ? CustomHttpPattern.fromPartial(object.custom) + : undefined; + message.body = object.body ?? ""; + message.responseBody = object.responseBody ?? 
""; + message.additionalBindings = object.additionalBindings?.map((e) => HttpRule.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseCustomHttpPattern(): CustomHttpPattern { + return { kind: "", path: "" }; +} + +export const CustomHttpPattern = { + encode(message: CustomHttpPattern, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.kind !== "") { + writer.uint32(10).string(message.kind); + } + if (message.path !== "") { + writer.uint32(18).string(message.path); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CustomHttpPattern { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCustomHttpPattern(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.kind = reader.string(); + break; + case 2: + message.path = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): CustomHttpPattern { + return { kind: isSet(object.kind) ? String(object.kind) : "", path: isSet(object.path) ? String(object.path) : "" }; + }, + + toJSON(message: CustomHttpPattern): unknown { + const obj: any = {}; + message.kind !== undefined && (obj.kind = message.kind); + message.path !== undefined && (obj.path = message.path); + return obj; + }, + + fromPartial, I>>(object: I): CustomHttpPattern { + const message = createBaseCustomHttpPattern(); + message.kind = object.kind ?? ""; + message.path = object.path ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? 
keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.upgrade.v1beta1/types/google/protobuf/any.ts b/ts-client/cosmos.upgrade.v1beta1/types/google/protobuf/any.ts new file mode 100644 index 000000000..c4ebf38be --- /dev/null +++ b/ts-client/cosmos.upgrade.v1beta1/types/google/protobuf/any.ts @@ -0,0 +1,240 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * `Any` contains an arbitrary serialized protocol buffer message along with a + * URL that describes the type of the serialized message. + * + * Protobuf library provides support to pack/unpack Any values in the form + * of utility functions or additional generated methods of the Any type. + * + * Example 1: Pack and unpack a message in C++. + * + * Foo foo = ...; + * Any any; + * any.PackFrom(foo); + * ... + * if (any.UnpackTo(&foo)) { + * ... + * } + * + * Example 2: Pack and unpack a message in Java. + * + * Foo foo = ...; + * Any any = Any.pack(foo); + * ... + * if (any.is(Foo.class)) { + * foo = any.unpack(Foo.class); + * } + * + * Example 3: Pack and unpack a message in Python. + * + * foo = Foo(...) + * any = Any() + * any.Pack(foo) + * ... + * if any.Is(Foo.DESCRIPTOR): + * any.Unpack(foo) + * ... + * + * Example 4: Pack and unpack a message in Go + * + * foo := &pb.Foo{...} + * any, err := anypb.New(foo) + * if err != nil { + * ... + * } + * ... + * foo := &pb.Foo{} + * if err := any.UnmarshalTo(foo); err != nil { + * ... + * } + * + * The pack methods provided by protobuf library will by default use + * 'type.googleapis.com/full.type.name' as the type URL and the unpack + * methods only use the fully qualified type name after the last '/' + * in the type URL, for example "foo.bar.com/x/y.z" will yield type + * name "y.z". 
+ * + * JSON + * ==== + * The JSON representation of an `Any` value uses the regular + * representation of the deserialized, embedded message, with an + * additional field `@type` which contains the type URL. Example: + * + * package google.profile; + * message Person { + * string first_name = 1; + * string last_name = 2; + * } + * + * { + * "@type": "type.googleapis.com/google.profile.Person", + * "firstName": , + * "lastName": + * } + * + * If the embedded message type is well-known and has a custom JSON + * representation, that representation will be embedded adding a field + * `value` which holds the custom JSON in addition to the `@type` + * field. Example (for message [google.protobuf.Duration][]): + * + * { + * "@type": "type.googleapis.com/google.protobuf.Duration", + * "value": "1.212s" + * } + */ +export interface Any { + /** + * A URL/resource name that uniquely identifies the type of the serialized + * protocol buffer message. This string must contain at least + * one "/" character. The last segment of the URL's path must represent + * the fully qualified name of the type (as in + * `path/google.protobuf.Duration`). The name should be in a canonical form + * (e.g., leading "." is not accepted). + * + * In practice, teams usually precompile into the binary all types that they + * expect it to use in the context of Any. However, for URLs which use the + * scheme `http`, `https`, or no scheme, one can optionally set up a type + * server that maps type URLs to message definitions as follows: + * + * * If no scheme is provided, `https` is assumed. + * * An HTTP GET on the URL must yield a [google.protobuf.Type][] + * value in binary format, or produce an error. + * * Applications are allowed to cache lookup results based on the + * URL, or have them precompiled into a binary to avoid any + * lookup. Therefore, binary compatibility needs to be preserved + * on changes to types. (Use versioned type names to manage + * breaking changes.) 
+ * + * Note: this functionality is not currently available in the official + * protobuf release, and it is not used for type URLs beginning with + * type.googleapis.com. + * + * Schemes other than `http`, `https` (or the empty scheme) might be + * used with implementation specific semantics. + */ + typeUrl: string; + /** Must be a valid serialized protocol buffer of the above specified type. */ + value: Uint8Array; +} + +function createBaseAny(): Any { + return { typeUrl: "", value: new Uint8Array() }; +} + +export const Any = { + encode(message: Any, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.typeUrl !== "") { + writer.uint32(10).string(message.typeUrl); + } + if (message.value.length !== 0) { + writer.uint32(18).bytes(message.value); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Any { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAny(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.typeUrl = reader.string(); + break; + case 2: + message.value = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Any { + return { + typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : "", + value: isSet(object.value) ? bytesFromBase64(object.value) : new Uint8Array(), + }; + }, + + toJSON(message: Any): unknown { + const obj: any = {}; + message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl); + message.value !== undefined + && (obj.value = base64FromBytes(message.value !== undefined ? message.value : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): Any { + const message = createBaseAny(); + message.typeUrl = object.typeUrl ?? ""; + message.value = object.value ?? 
new Uint8Array(); + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.upgrade.v1beta1/types/google/protobuf/descriptor.ts b/ts-client/cosmos.upgrade.v1beta1/types/google/protobuf/descriptor.ts new file mode 100644 index 000000000..8fb7bb24c --- /dev/null +++ b/ts-client/cosmos.upgrade.v1beta1/types/google/protobuf/descriptor.ts @@ -0,0 +1,3753 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSet { + file: FileDescriptorProto[]; +} + +/** Describes a complete .proto file. */ +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name: string; + /** e.g. "foo", "foo.bar", etc. */ + package: string; + /** Names of files imported by this file. */ + dependency: string[]; + /** Indexes of the public imported files in the dependency list above. */ + publicDependency: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weakDependency: number[]; + /** All top-level definitions in this file. */ + messageType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + service: ServiceDescriptorProto[]; + extension: FieldDescriptorProto[]; + options: + | FileOptions + | undefined; + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. + */ + sourceCodeInfo: + | SourceCodeInfo + | undefined; + /** + * The syntax of the proto file. + * The supported values are "proto2" and "proto3". 
+ */ + syntax: string; +} + +/** Describes a message type. */ +export interface DescriptorProto { + name: string; + field: FieldDescriptorProto[]; + extension: FieldDescriptorProto[]; + nestedType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + extensionRange: DescriptorProto_ExtensionRange[]; + oneofDecl: OneofDescriptorProto[]; + options: MessageOptions | undefined; + reservedRange: DescriptorProto_ReservedRange[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reservedName: string[]; +} + +export interface DescriptorProto_ExtensionRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; + options: ExtensionRangeOptions | undefined; +} + +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; +} + +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Describes a field within a message. */ +export interface FieldDescriptorProto { + name: string; + number: number; + label: FieldDescriptorProto_Label; + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type: FieldDescriptorProto_Type; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). + */ + typeName: string; + /** + * For extensions, this is the name of the type being extended. 
It is + * resolved in the same manner as type_name. + */ + extendee: string; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + * TODO(kenton): Base-64 encode? + */ + defaultValue: string; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneofIndex: number; + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + jsonName: string; + options: + | FieldOptions + | undefined; + /** + * If true, this is a proto3 "optional". When a proto3 field is optional, it + * tracks presence regardless of field type. + * + * When proto3_optional is true, this field must be belong to a oneof to + * signal to old proto3 clients that presence is tracked for this field. This + * oneof is known as a "synthetic" oneof, and this field must be its sole + * member (each proto3 optional field gets its own synthetic oneof). Synthetic + * oneofs exist in the descriptor only, and do not generate any API. Synthetic + * oneofs must be ordered after all "real" oneofs. + * + * For message fields, proto3_optional doesn't create any semantic change, + * since non-repeated message fields always track presence. However it still + * indicates the semantic detail of whether the user wrote "optional" or not. + * This can be useful for round-tripping the .proto file. For consistency we + * give message fields a synthetic oneof also, even though it is not required + * to track presence. 
This is especially important because the parser can't + * tell if a field is a message or an enum, so it must always create a + * synthetic oneof. + * + * Proto2 optional fields do not set this flag, because they already indicate + * optional with `LABEL_OPTIONAL`. + */ + proto3Optional: boolean; +} + +export enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + TYPE_GROUP = 10, + /** TYPE_MESSAGE - Length-delimited aggregate. */ + TYPE_MESSAGE = 11, + /** TYPE_BYTES - New in version 2. */ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + /** TYPE_SINT64 - Uses ZigZag encoding. 
*/ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + return FieldDescriptorProto_Type.TYPE_SINT64; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Type.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case 
FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + case FieldDescriptorProto_Type.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Label.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string { + switch (object) { + case 
FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + case FieldDescriptorProto_Label.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** Describes a oneof. */ +export interface OneofDescriptorProto { + name: string; + options: OneofOptions | undefined; +} + +/** Describes an enum type. */ +export interface EnumDescriptorProto { + name: string; + value: EnumValueDescriptorProto[]; + options: + | EnumOptions + | undefined; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reservedRange: EnumDescriptorProto_EnumReservedRange[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reservedName: string[]; +} + +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start: number; + /** Inclusive. */ + end: number; +} + +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProto { + name: string; + number: number; + options: EnumValueOptions | undefined; +} + +/** Describes a service. */ +export interface ServiceDescriptorProto { + name: string; + method: MethodDescriptorProto[]; + options: ServiceOptions | undefined; +} + +/** Describes a method of a service. */ +export interface MethodDescriptorProto { + name: string; + /** + * Input and output type names. 
These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + inputType: string; + outputType: string; + options: + | MethodOptions + | undefined; + /** Identifies if client streams multiple client messages */ + clientStreaming: boolean; + /** Identifies if server streams multiple server messages */ + serverStreaming: boolean; +} + +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage: string; + /** + * Controls the name of the wrapper Java class generated for the .proto file. + * That class will always contain the .proto file's getDescriptor() method as + * well as any top-level extensions defined in the .proto file. + * If java_multiple_files is disabled, then all the other classes from the + * .proto file will be nested inside the single wrapper outer class. + */ + javaOuterClassname: string; + /** + * If enabled, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the wrapper class + * named by java_outer_classname. However, the wrapper class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + javaMultipleFiles: boolean; + /** + * This option does nothing. + * + * @deprecated + */ + javaGenerateEqualsAndHash: boolean; + /** + * If set true, then the Java2 code generator will generate code that + * throws an exception whenever an attempt is made to assign a non-UTF-8 + * byte sequence to a string field. + * Message reflection will do the same. + * However, an extension field still accepts non-UTF-8 byte sequences. 
+ * This option has no effect on when used with the lite runtime. + */ + javaStringCheckUtf8: boolean; + optimizeFor: FileOptions_OptimizeMode; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + goPackage: string; + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + ccGenericServices: boolean; + javaGenericServices: boolean; + pyGenericServices: boolean; + phpGenericServices: boolean; + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated: boolean; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + ccEnableArenas: boolean; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. + */ + objcClassPrefix: string; + /** Namespace for generated classes; defaults to the package. 
*/ + csharpNamespace: string; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swiftPrefix: string; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + phpClassPrefix: string; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. When this option is empty, the package name will be used for + * determining the namespace. + */ + phpNamespace: string; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + phpMetadataNamespace: string; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + rubyPackage: string; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Generated classes can be optimized for speed or code size. */ +export enum FileOptions_OptimizeMode { + /** SPEED - Generate complete code for parsing, serialization, */ + SPEED = 1, + /** CODE_SIZE - etc. */ + CODE_SIZE = 2, + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. 
*/ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1, +} + +export function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + case -1: + case "UNRECOGNIZED": + default: + return FileOptions_OptimizeMode.UNRECOGNIZED; + } +} + +export function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + case FileOptions_OptimizeMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + messageSetWireFormat: boolean; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". 
+ */ + noStandardDescriptorAccessor: boolean; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated: boolean; + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + mapEntry: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface FieldOptions { + /** + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is not yet implemented in the open source + * release -- sorry, we'll try to include it in a future version! + */ + ctype: FieldOptions_CType; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. 
+ */ + packed: boolean; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. + * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype: FieldOptions_JSType; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * Note that implementations may choose not to check required fields within + * a lazy sub-message. That is, calling IsInitialized() on the outer message + * may return true even if the inner message has missing required fields. 
+ * This is necessary because otherwise the inner message would have to be + * parsed in order to perform the check, defeating the purpose of lazy + * parsing. An implementation which chooses not to check required fields + * must be consistent about it. That is, for any particular sub-message, the + * implementation must either *always* check its required fields, or *never* + * check its required fields, regardless of whether or not the message has + * been parsed. + */ + lazy: boolean; + /** + * Is this field deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated: boolean; + /** For Google-internal migration only. Do not use. */ + weak: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export enum FieldOptions_CType { + /** STRING - Default mode. */ + STRING = 0, + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_CType.UNRECOGNIZED; + } +} + +export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + case FieldOptions_CType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. */ + JS_NORMAL = 0, + /** JS_STRING - Use JavaScript strings. 
*/ + JS_STRING = 1, + /** JS_NUMBER - Use JavaScript numbers. */ + JS_NUMBER = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_JSType.UNRECOGNIZED; + } +} + +export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + case FieldOptions_JSType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface OneofOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias: boolean; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumValueOptions { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. 
*/ + uninterpretedOption: UninterpretedOption[]; +} + +export interface ServiceOptions { + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated: boolean; + idempotencyLevel: MethodOptions_IdempotencyLevel; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
+ */ +export enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1, +} + +export function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + case -1: + case "UNRECOGNIZED": + default: + return MethodOptions_IdempotencyLevel.UNRECOGNIZED; + } +} + +export function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + case MethodOptions_IdempotencyLevel.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. 
+ */ + identifierValue: string; + positiveIntValue: number; + negativeIntValue: number; + doubleValue: number; + stringValue: Uint8Array; + aggregateValue: string; +} + +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePart { + namePart: string; + isExtension: boolean; +} + +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. 
This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[]; +} + +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition. For + * example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). 
+ */ + path: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. + * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to qux. + * // + * // Another line attached to qux. + * optional double qux = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to qux or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. * / + * /* Block comment attached to + * * grault. 
* / + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leadingComments: string; + trailingComments: string; + leadingDetachedComments: string[]; +} + +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. + */ + annotation: GeneratedCodeInfo_Annotation[]; +} + +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[]; + /** Identifies the filesystem path to the original source .proto. */ + sourceFile: string; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin: number; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified offset. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end: number; +} + +function createBaseFileDescriptorSet(): FileDescriptorSet { + return { file: [] }; +} + +export const FileDescriptorSet = { + encode(message: FileDescriptorSet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.file) { + FileDescriptorProto.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorSet { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.file.push(FileDescriptorProto.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorSet { + return { file: Array.isArray(object?.file) ? object.file.map((e: any) => FileDescriptorProto.fromJSON(e)) : [] }; + }, + + toJSON(message: FileDescriptorSet): unknown { + const obj: any = {}; + if (message.file) { + obj.file = message.file.map((e) => e ? FileDescriptorProto.toJSON(e) : undefined); + } else { + obj.file = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorSet { + const message = createBaseFileDescriptorSet(); + message.file = object.file?.map((e) => FileDescriptorProto.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFileDescriptorProto(): FileDescriptorProto { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + }; +} + +export const FileDescriptorProto = { + encode(message: FileDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.package !== "") { + writer.uint32(18).string(message.package); + } + for (const v of message.dependency) { + writer.uint32(26).string(v!); + } + writer.uint32(82).fork(); + for (const v of message.publicDependency) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(90).fork(); + for (const v of message.weakDependency) { + writer.int32(v); + } + writer.ldelim(); + for (const v of message.messageType) { + DescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of 
message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const v of message.service) { + ServiceDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(58).fork()).ldelim(); + } + if (message.options !== undefined) { + FileOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.sourceCodeInfo !== undefined) { + SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(74).fork()).ldelim(); + } + if (message.syntax !== "") { + writer.uint32(98).string(message.syntax); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFileDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.package = reader.string(); + break; + case 3: + message.dependency.push(reader.string()); + break; + case 10: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.publicDependency.push(reader.int32()); + } + } else { + message.publicDependency.push(reader.int32()); + } + break; + case 11: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.weakDependency.push(reader.int32()); + } + } else { + message.weakDependency.push(reader.int32()); + } + break; + case 4: + message.messageType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.service.push(ServiceDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + 
message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 8: + message.options = FileOptions.decode(reader, reader.uint32()); + break; + case 9: + message.sourceCodeInfo = SourceCodeInfo.decode(reader, reader.uint32()); + break; + case 12: + message.syntax = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + package: isSet(object.package) ? String(object.package) : "", + dependency: Array.isArray(object?.dependency) ? object.dependency.map((e: any) => String(e)) : [], + publicDependency: Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e: any) => Number(e)) + : [], + weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e: any) => Number(e)) : [], + messageType: Array.isArray(object?.messageType) + ? object.messageType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + service: Array.isArray(object?.service) ? object.service.map((e: any) => ServiceDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) ? SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, + syntax: isSet(object.syntax) ? 
String(object.syntax) : "", + }; + }, + + toJSON(message: FileDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.package !== undefined && (obj.package = message.package); + if (message.dependency) { + obj.dependency = message.dependency.map((e) => e); + } else { + obj.dependency = []; + } + if (message.publicDependency) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } else { + obj.publicDependency = []; + } + if (message.weakDependency) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } else { + obj.weakDependency = []; + } + if (message.messageType) { + obj.messageType = message.messageType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.messageType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.service) { + obj.service = message.service.map((e) => e ? ServiceDescriptorProto.toJSON(e) : undefined); + } else { + obj.service = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + message.options !== undefined && (obj.options = message.options ? FileOptions.toJSON(message.options) : undefined); + message.sourceCodeInfo !== undefined + && (obj.sourceCodeInfo = message.sourceCodeInfo ? SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined); + message.syntax !== undefined && (obj.syntax = message.syntax); + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorProto { + const message = createBaseFileDescriptorProto(); + message.name = object.name ?? ""; + message.package = object.package ?? 
""; + message.dependency = object.dependency?.map((e) => e) || []; + message.publicDependency = object.publicDependency?.map((e) => e) || []; + message.weakDependency = object.weakDependency?.map((e) => e) || []; + message.messageType = object.messageType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.service = object.service?.map((e) => ServiceDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? FileOptions.fromPartial(object.options) + : undefined; + message.sourceCodeInfo = (object.sourceCodeInfo !== undefined && object.sourceCodeInfo !== null) + ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) + : undefined; + message.syntax = object.syntax ?? ""; + return message; + }, +}; + +function createBaseDescriptorProto(): DescriptorProto { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + }; +} + +export const DescriptorProto = { + encode(message: DescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.field) { + FieldDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.nestedType) { + DescriptorProto.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.extensionRange) { + DescriptorProto_ExtensionRange.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const 
v of message.oneofDecl) { + OneofDescriptorProto.encode(v!, writer.uint32(66).fork()).ldelim(); + } + if (message.options !== undefined) { + MessageOptions.encode(message.options, writer.uint32(58).fork()).ldelim(); + } + for (const v of message.reservedRange) { + DescriptorProto_ReservedRange.encode(v!, writer.uint32(74).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(82).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.field.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.nestedType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 4: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.extensionRange.push(DescriptorProto_ExtensionRange.decode(reader, reader.uint32())); + break; + case 8: + message.oneofDecl.push(OneofDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + message.options = MessageOptions.decode(reader, reader.uint32()); + break; + case 9: + message.reservedRange.push(DescriptorProto_ReservedRange.decode(reader, reader.uint32())); + break; + case 10: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + field: Array.isArray(object?.field) ? 
object.field.map((e: any) => FieldDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: Array.isArray(object?.nestedType) + ? object.nestedType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + extensionRange: Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e: any) => DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e: any) => OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? MessageOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: DescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.field) { + obj.field = message.field.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.field = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + if (message.nestedType) { + obj.nestedType = message.nestedType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.nestedType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.extensionRange) { + obj.extensionRange = message.extensionRange.map((e) => e ? 
DescriptorProto_ExtensionRange.toJSON(e) : undefined); + } else { + obj.extensionRange = []; + } + if (message.oneofDecl) { + obj.oneofDecl = message.oneofDecl.map((e) => e ? OneofDescriptorProto.toJSON(e) : undefined); + } else { + obj.oneofDecl = []; + } + message.options !== undefined + && (obj.options = message.options ? MessageOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => e ? DescriptorProto_ReservedRange.toJSON(e) : undefined); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): DescriptorProto { + const message = createBaseDescriptorProto(); + message.name = object.name ?? ""; + message.field = object.field?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.nestedType = object.nestedType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.extensionRange = object.extensionRange?.map((e) => DescriptorProto_ExtensionRange.fromPartial(e)) || []; + message.oneofDecl = object.oneofDecl?.map((e) => OneofDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
MessageOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => DescriptorProto_ReservedRange.fromPartial(e)) || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseDescriptorProto_ExtensionRange(): DescriptorProto_ExtensionRange { + return { start: 0, end: 0, options: undefined }; +} + +export const DescriptorProto_ExtensionRange = { + encode(message: DescriptorProto_ExtensionRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + if (message.options !== undefined) { + ExtensionRangeOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ExtensionRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ExtensionRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + case 3: + message.options = ExtensionRangeOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ExtensionRange { + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + options: isSet(object.options) ? 
ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: DescriptorProto_ExtensionRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + message.options !== undefined + && (obj.options = message.options ? ExtensionRangeOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ExtensionRange { + const message = createBaseDescriptorProto_ExtensionRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? ExtensionRangeOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseDescriptorProto_ReservedRange(): DescriptorProto_ReservedRange { + return { start: 0, end: 0 }; +} + +export const DescriptorProto_ReservedRange = { + encode(message: DescriptorProto_ReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? 
Number(object.end) : 0 }; + }, + + toJSON(message: DescriptorProto_ReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ReservedRange { + const message = createBaseDescriptorProto_ReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + return message; + }, +}; + +function createBaseExtensionRangeOptions(): ExtensionRangeOptions { + return { uninterpretedOption: [] }; +} + +export const ExtensionRangeOptions = { + encode(message: ExtensionRangeOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ExtensionRangeOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ExtensionRangeOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} + +export const FieldDescriptorProto = { + encode(message: FieldDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(24).int32(message.number); + } + if (message.label !== 1) { + writer.uint32(32).int32(message.label); + } + if (message.type !== 1) { + writer.uint32(40).int32(message.type); + } + if (message.typeName !== "") { + writer.uint32(50).string(message.typeName); + } + if (message.extendee !== "") { + writer.uint32(18).string(message.extendee); + } + if (message.defaultValue !== "") { + writer.uint32(58).string(message.defaultValue); + } + if (message.oneofIndex !== 0) { + writer.uint32(72).int32(message.oneofIndex); + } + if (message.jsonName !== "") { + writer.uint32(82).string(message.jsonName); + } + if (message.options !== undefined) { + FieldOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.proto3Optional === true) { + writer.uint32(136).bool(message.proto3Optional); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 3: + message.number = reader.int32(); + break; + case 4: + message.label = reader.int32() as any; + break; + case 5: + message.type = reader.int32() as any; + break; + case 6: + message.typeName = reader.string(); + break; + case 2: + message.extendee = reader.string(); + break; + case 7: + message.defaultValue = reader.string(); + break; + case 9: + message.oneofIndex = reader.int32(); + break; + case 10: + message.jsonName = reader.string(); + break; + case 8: + message.options = FieldOptions.decode(reader, reader.uint32()); + break; + case 17: + message.proto3Optional = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) ? String(object.typeName) : "", + extendee: isSet(object.extendee) ? String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "", + oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? String(object.jsonName) : "", + options: isSet(object.options) ? FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) ? 
Boolean(object.proto3Optional) : false, + }; + }, + + toJSON(message: FieldDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label)); + message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type)); + message.typeName !== undefined && (obj.typeName = message.typeName); + message.extendee !== undefined && (obj.extendee = message.extendee); + message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue); + message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex)); + message.jsonName !== undefined && (obj.jsonName = message.jsonName); + message.options !== undefined && (obj.options = message.options ? FieldOptions.toJSON(message.options) : undefined); + message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional); + return obj; + }, + + fromPartial, I>>(object: I): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.label = object.label ?? 1; + message.type = object.type ?? 1; + message.typeName = object.typeName ?? ""; + message.extendee = object.extendee ?? ""; + message.defaultValue = object.defaultValue ?? ""; + message.oneofIndex = object.oneofIndex ?? 0; + message.jsonName = object.jsonName ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? FieldOptions.fromPartial(object.options) + : undefined; + message.proto3Optional = object.proto3Optional ?? 
false; + return message; + }, +}; + +function createBaseOneofDescriptorProto(): OneofDescriptorProto { + return { name: "", options: undefined }; +} + +export const OneofDescriptorProto = { + encode(message: OneofDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.options !== undefined) { + OneofOptions.encode(message.options, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.options = OneofOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + options: isSet(object.options) ? OneofOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: OneofDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.options !== undefined && (obj.options = message.options ? OneofOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto(); + message.name = object.name ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? 
OneofOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseEnumDescriptorProto(): EnumDescriptorProto { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] }; +} + +export const EnumDescriptorProto = { + encode(message: EnumDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.value) { + EnumValueDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + EnumOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.reservedRange) { + EnumDescriptorProto_EnumReservedRange.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(42).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.value.push(EnumValueDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = EnumOptions.decode(reader, reader.uint32()); + break; + case 4: + message.reservedRange.push(EnumDescriptorProto_EnumReservedRange.decode(reader, reader.uint32())); + break; + case 5: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + value: Array.isArray(object?.value) ? 
object.value.map((e: any) => EnumValueDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? EnumOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: EnumDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.value) { + obj.value = message.value.map((e) => e ? EnumValueDescriptorProto.toJSON(e) : undefined); + } else { + obj.value = []; + } + message.options !== undefined && (obj.options = message.options ? EnumOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => + e ? EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined + ); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto(); + message.name = object.name ?? ""; + message.value = object.value?.map((e) => EnumValueDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
EnumOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => EnumDescriptorProto_EnumReservedRange.fromPartial(e)) + || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseEnumDescriptorProto_EnumReservedRange(): EnumDescriptorProto_EnumReservedRange { + return { start: 0, end: 0 }; +} + +export const EnumDescriptorProto_EnumReservedRange = { + encode(message: EnumDescriptorProto_EnumReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto_EnumReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto_EnumReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + + toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +function createBaseEnumValueDescriptorProto(): EnumValueDescriptorProto { + return { name: "", number: 0, options: undefined }; +} + +export const EnumValueDescriptorProto = { + encode(message: EnumValueDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(16).int32(message.number); + } + if (message.options !== undefined) { + EnumValueOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.number = reader.int32(); + break; + case 3: + message.options = EnumValueOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + options: isSet(object.options) ? EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: EnumValueDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.options !== undefined + && (obj.options = message.options ? 
EnumValueOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? EnumValueOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseServiceDescriptorProto(): ServiceDescriptorProto { + return { name: "", method: [], options: undefined }; +} + +export const ServiceDescriptorProto = { + encode(message: ServiceDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.method) { + MethodDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + ServiceOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.method.push(MethodDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = ServiceOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + method: Array.isArray(object?.method) ? object.method.map((e: any) => MethodDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? 
ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: ServiceDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.method) { + obj.method = message.method.map((e) => e ? MethodDescriptorProto.toJSON(e) : undefined); + } else { + obj.method = []; + } + message.options !== undefined + && (obj.options = message.options ? ServiceOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto(); + message.name = object.name ?? ""; + message.method = object.method?.map((e) => MethodDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? ServiceOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseMethodDescriptorProto(): MethodDescriptorProto { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} + +export const MethodDescriptorProto = { + encode(message: MethodDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.inputType !== "") { + writer.uint32(18).string(message.inputType); + } + if (message.outputType !== "") { + writer.uint32(26).string(message.outputType); + } + if (message.options !== undefined) { + MethodOptions.encode(message.options, writer.uint32(34).fork()).ldelim(); + } + if (message.clientStreaming === true) { + writer.uint32(40).bool(message.clientStreaming); + } + if (message.serverStreaming === true) { + writer.uint32(48).bool(message.serverStreaming); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodDescriptorProto { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.inputType = reader.string(); + break; + case 3: + message.outputType = reader.string(); + break; + case 4: + message.options = MethodOptions.decode(reader, reader.uint32()); + break; + case 5: + message.clientStreaming = reader.bool(); + break; + case 6: + message.serverStreaming = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + inputType: isSet(object.inputType) ? String(object.inputType) : "", + outputType: isSet(object.outputType) ? String(object.outputType) : "", + options: isSet(object.options) ? MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false, + serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false, + }; + }, + + toJSON(message: MethodDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.inputType !== undefined && (obj.inputType = message.inputType); + message.outputType !== undefined && (obj.outputType = message.outputType); + message.options !== undefined + && (obj.options = message.options ? MethodOptions.toJSON(message.options) : undefined); + message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming); + message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming); + return obj; + }, + + fromPartial, I>>(object: I): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto(); + message.name = object.name ?? 
""; + message.inputType = object.inputType ?? ""; + message.outputType = object.outputType ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? MethodOptions.fromPartial(object.options) + : undefined; + message.clientStreaming = object.clientStreaming ?? false; + message.serverStreaming = object.serverStreaming ?? false; + return message; + }, +}; + +function createBaseFileOptions(): FileOptions { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + uninterpretedOption: [], + }; +} + +export const FileOptions = { + encode(message: FileOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.javaPackage !== "") { + writer.uint32(10).string(message.javaPackage); + } + if (message.javaOuterClassname !== "") { + writer.uint32(66).string(message.javaOuterClassname); + } + if (message.javaMultipleFiles === true) { + writer.uint32(80).bool(message.javaMultipleFiles); + } + if (message.javaGenerateEqualsAndHash === true) { + writer.uint32(160).bool(message.javaGenerateEqualsAndHash); + } + if (message.javaStringCheckUtf8 === true) { + writer.uint32(216).bool(message.javaStringCheckUtf8); + } + if (message.optimizeFor !== 1) { + writer.uint32(72).int32(message.optimizeFor); + } + if (message.goPackage !== "") { + writer.uint32(90).string(message.goPackage); + } + if (message.ccGenericServices === true) { + writer.uint32(128).bool(message.ccGenericServices); + } + if (message.javaGenericServices === true) { + writer.uint32(136).bool(message.javaGenericServices); + } + if 
(message.pyGenericServices === true) { + writer.uint32(144).bool(message.pyGenericServices); + } + if (message.phpGenericServices === true) { + writer.uint32(336).bool(message.phpGenericServices); + } + if (message.deprecated === true) { + writer.uint32(184).bool(message.deprecated); + } + if (message.ccEnableArenas === true) { + writer.uint32(248).bool(message.ccEnableArenas); + } + if (message.objcClassPrefix !== "") { + writer.uint32(290).string(message.objcClassPrefix); + } + if (message.csharpNamespace !== "") { + writer.uint32(298).string(message.csharpNamespace); + } + if (message.swiftPrefix !== "") { + writer.uint32(314).string(message.swiftPrefix); + } + if (message.phpClassPrefix !== "") { + writer.uint32(322).string(message.phpClassPrefix); + } + if (message.phpNamespace !== "") { + writer.uint32(330).string(message.phpNamespace); + } + if (message.phpMetadataNamespace !== "") { + writer.uint32(354).string(message.phpMetadataNamespace); + } + if (message.rubyPackage !== "") { + writer.uint32(362).string(message.rubyPackage); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.javaPackage = reader.string(); + break; + case 8: + message.javaOuterClassname = reader.string(); + break; + case 10: + message.javaMultipleFiles = reader.bool(); + break; + case 20: + message.javaGenerateEqualsAndHash = reader.bool(); + break; + case 27: + message.javaStringCheckUtf8 = reader.bool(); + break; + case 9: + message.optimizeFor = reader.int32() as any; + break; + case 11: + message.goPackage = reader.string(); + break; + case 16: + message.ccGenericServices = reader.bool(); + break; + case 17: + message.javaGenericServices = reader.bool(); + break; + case 18: + message.pyGenericServices = reader.bool(); + break; + case 42: + message.phpGenericServices = reader.bool(); + break; + case 23: + message.deprecated = reader.bool(); + break; + case 31: + message.ccEnableArenas = reader.bool(); + break; + case 36: + message.objcClassPrefix = reader.string(); + break; + case 37: + message.csharpNamespace = reader.string(); + break; + case 39: + message.swiftPrefix = reader.string(); + break; + case 40: + message.phpClassPrefix = reader.string(); + break; + case 41: + message.phpNamespace = reader.string(); + break; + case 44: + message.phpMetadataNamespace = reader.string(); + break; + case 45: + message.rubyPackage = reader.string(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileOptions { + return { + javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "", + javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) ? 
Boolean(object.javaMultipleFiles) : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false, + optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, + goPackage: isSet(object.goPackage) ? String(object.goPackage) : "", + ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false, + javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false, + pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false, + phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false, + objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "", + csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "", + swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "", + phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "", + phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "", + rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "", + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FileOptions): unknown { + const obj: any = {}; + message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage); + message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname); + message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles); + message.javaGenerateEqualsAndHash !== undefined + && (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash); + message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8); + message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor)); + message.goPackage !== undefined && (obj.goPackage = message.goPackage); + message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices); + message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices); + message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices); + message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas); + message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix); + message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace); + message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix); + message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix); + message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace); + message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace); + message.rubyPackage !== undefined 
&& (obj.rubyPackage = message.rubyPackage); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileOptions { + const message = createBaseFileOptions(); + message.javaPackage = object.javaPackage ?? ""; + message.javaOuterClassname = object.javaOuterClassname ?? ""; + message.javaMultipleFiles = object.javaMultipleFiles ?? false; + message.javaGenerateEqualsAndHash = object.javaGenerateEqualsAndHash ?? false; + message.javaStringCheckUtf8 = object.javaStringCheckUtf8 ?? false; + message.optimizeFor = object.optimizeFor ?? 1; + message.goPackage = object.goPackage ?? ""; + message.ccGenericServices = object.ccGenericServices ?? false; + message.javaGenericServices = object.javaGenericServices ?? false; + message.pyGenericServices = object.pyGenericServices ?? false; + message.phpGenericServices = object.phpGenericServices ?? false; + message.deprecated = object.deprecated ?? false; + message.ccEnableArenas = object.ccEnableArenas ?? false; + message.objcClassPrefix = object.objcClassPrefix ?? ""; + message.csharpNamespace = object.csharpNamespace ?? ""; + message.swiftPrefix = object.swiftPrefix ?? ""; + message.phpClassPrefix = object.phpClassPrefix ?? ""; + message.phpNamespace = object.phpNamespace ?? ""; + message.phpMetadataNamespace = object.phpMetadataNamespace ?? ""; + message.rubyPackage = object.rubyPackage ?? 
""; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMessageOptions(): MessageOptions { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + uninterpretedOption: [], + }; +} + +export const MessageOptions = { + encode(message: MessageOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.messageSetWireFormat === true) { + writer.uint32(8).bool(message.messageSetWireFormat); + } + if (message.noStandardDescriptorAccessor === true) { + writer.uint32(16).bool(message.noStandardDescriptorAccessor); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.mapEntry === true) { + writer.uint32(56).bool(message.mapEntry); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MessageOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMessageOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.messageSetWireFormat = reader.bool(); + break; + case 2: + message.noStandardDescriptorAccessor = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 7: + message.mapEntry = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MessageOptions { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) ? 
Boolean(object.messageSetWireFormat) : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MessageOptions): unknown { + const obj: any = {}; + message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat); + message.noStandardDescriptorAccessor !== undefined + && (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MessageOptions { + const message = createBaseMessageOptions(); + message.messageSetWireFormat = object.messageSetWireFormat ?? false; + message.noStandardDescriptorAccessor = object.noStandardDescriptorAccessor ?? false; + message.deprecated = object.deprecated ?? false; + message.mapEntry = object.mapEntry ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldOptions(): FieldOptions { + return { ctype: 0, packed: false, jstype: 0, lazy: false, deprecated: false, weak: false, uninterpretedOption: [] }; +} + +export const FieldOptions = { + encode(message: FieldOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ctype !== 0) { + writer.uint32(8).int32(message.ctype); + } + if (message.packed === true) { + writer.uint32(16).bool(message.packed); + } + if (message.jstype !== 0) { + writer.uint32(48).int32(message.jstype); + } + if (message.lazy === true) { + writer.uint32(40).bool(message.lazy); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.weak === true) { + writer.uint32(80).bool(message.weak); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.ctype = reader.int32() as any; + break; + case 2: + message.packed = reader.bool(); + break; + case 6: + message.jstype = reader.int32() as any; + break; + case 5: + message.lazy = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 10: + message.weak = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldOptions { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? Boolean(object.lazy) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? Boolean(object.weak) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FieldOptions): unknown { + const obj: any = {}; + message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype)); + message.packed !== undefined && (obj.packed = message.packed); + message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype)); + message.lazy !== undefined && (obj.lazy = message.lazy); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.weak !== undefined && (obj.weak = message.weak); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FieldOptions { + const message = createBaseFieldOptions(); + message.ctype = object.ctype ?? 0; + message.packed = object.packed ?? false; + message.jstype = object.jstype ?? 0; + message.lazy = object.lazy ?? false; + message.deprecated = object.deprecated ?? false; + message.weak = object.weak ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseOneofOptions(): OneofOptions { + return { uninterpretedOption: [] }; +} + +export const OneofOptions = { + encode(message: OneofOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: OneofOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): OneofOptions { + const message = createBaseOneofOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumOptions(): EnumOptions { + return { allowAlias: false, deprecated: false, uninterpretedOption: [] }; +} + +export const EnumOptions = { + encode(message: EnumOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.allowAlias === true) { + writer.uint32(16).bool(message.allowAlias); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.allowAlias = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumOptions { + return { + allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumOptions): unknown { + const obj: any = {}; + message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumOptions { + const message = createBaseEnumOptions(); + message.allowAlias = object.allowAlias ?? false; + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumValueOptions(): EnumValueOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const EnumValueOptions = { + encode(message: EnumValueOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(8).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumValueOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumValueOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumValueOptions { + const message = createBaseEnumValueOptions(); + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseServiceOptions(): ServiceOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const ServiceOptions = { + encode(message: ServiceOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceOptions { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ServiceOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ServiceOptions { + const message = createBaseServiceOptions(); + message.deprecated = object.deprecated ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMethodOptions(): MethodOptions { + return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] }; +} + +export const MethodOptions = { + encode(message: MethodOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + if (message.idempotencyLevel !== 0) { + writer.uint32(272).int32(message.idempotencyLevel); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 34: + message.idempotencyLevel = reader.int32() as any; + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : 0, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MethodOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.idempotencyLevel !== undefined + && (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel)); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MethodOptions { + const message = createBaseMethodOptions(); + message.deprecated = object.deprecated ?? false; + message.idempotencyLevel = object.idempotencyLevel ?? 0; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseUninterpretedOption(): UninterpretedOption { + return { + name: [], + identifierValue: "", + positiveIntValue: 0, + negativeIntValue: 0, + doubleValue: 0, + stringValue: new Uint8Array(), + aggregateValue: "", + }; +} + +export const UninterpretedOption = { + encode(message: UninterpretedOption, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.name) { + UninterpretedOption_NamePart.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.identifierValue !== "") { + writer.uint32(26).string(message.identifierValue); + } + if (message.positiveIntValue !== 0) { + writer.uint32(32).uint64(message.positiveIntValue); + } + if (message.negativeIntValue !== 0) { + writer.uint32(40).int64(message.negativeIntValue); + } + if (message.doubleValue !== 0) { + writer.uint32(49).double(message.doubleValue); + } + if (message.stringValue.length !== 0) { + writer.uint32(58).bytes(message.stringValue); + } + if (message.aggregateValue !== "") { + writer.uint32(66).string(message.aggregateValue); + } + 
return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.name.push(UninterpretedOption_NamePart.decode(reader, reader.uint32())); + break; + case 3: + message.identifierValue = reader.string(); + break; + case 4: + message.positiveIntValue = longToNumber(reader.uint64() as Long); + break; + case 5: + message.negativeIntValue = longToNumber(reader.int64() as Long); + break; + case 6: + message.doubleValue = reader.double(); + break; + case 7: + message.stringValue = reader.bytes(); + break; + case 8: + message.aggregateValue = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption { + return { + name: Array.isArray(object?.name) ? object.name.map((e: any) => UninterpretedOption_NamePart.fromJSON(e)) : [], + identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "", + positiveIntValue: isSet(object.positiveIntValue) ? Number(object.positiveIntValue) : 0, + negativeIntValue: isSet(object.negativeIntValue) ? Number(object.negativeIntValue) : 0, + doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) ? bytesFromBase64(object.stringValue) : new Uint8Array(), + aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "", + }; + }, + + toJSON(message: UninterpretedOption): unknown { + const obj: any = {}; + if (message.name) { + obj.name = message.name.map((e) => e ? 
UninterpretedOption_NamePart.toJSON(e) : undefined); + } else { + obj.name = []; + } + message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue); + message.positiveIntValue !== undefined && (obj.positiveIntValue = Math.round(message.positiveIntValue)); + message.negativeIntValue !== undefined && (obj.negativeIntValue = Math.round(message.negativeIntValue)); + message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue); + message.stringValue !== undefined + && (obj.stringValue = base64FromBytes( + message.stringValue !== undefined ? message.stringValue : new Uint8Array(), + )); + message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption { + const message = createBaseUninterpretedOption(); + message.name = object.name?.map((e) => UninterpretedOption_NamePart.fromPartial(e)) || []; + message.identifierValue = object.identifierValue ?? ""; + message.positiveIntValue = object.positiveIntValue ?? 0; + message.negativeIntValue = object.negativeIntValue ?? 0; + message.doubleValue = object.doubleValue ?? 0; + message.stringValue = object.stringValue ?? new Uint8Array(); + message.aggregateValue = object.aggregateValue ?? ""; + return message; + }, +}; + +function createBaseUninterpretedOption_NamePart(): UninterpretedOption_NamePart { + return { namePart: "", isExtension: false }; +} + +export const UninterpretedOption_NamePart = { + encode(message: UninterpretedOption_NamePart, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.namePart !== "") { + writer.uint32(10).string(message.namePart); + } + if (message.isExtension === true) { + writer.uint32(16).bool(message.isExtension); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption_NamePart { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseUninterpretedOption_NamePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.namePart = reader.string(); + break; + case 2: + message.isExtension = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption_NamePart { + return { + namePart: isSet(object.namePart) ? String(object.namePart) : "", + isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false, + }; + }, + + toJSON(message: UninterpretedOption_NamePart): unknown { + const obj: any = {}; + message.namePart !== undefined && (obj.namePart = message.namePart); + message.isExtension !== undefined && (obj.isExtension = message.isExtension); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart(); + message.namePart = object.namePart ?? ""; + message.isExtension = object.isExtension ?? false; + return message; + }, +}; + +function createBaseSourceCodeInfo(): SourceCodeInfo { + return { location: [] }; +} + +export const SourceCodeInfo = { + encode(message: SourceCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.location) { + SourceCodeInfo_Location.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.location.push(SourceCodeInfo_Location.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo { + return { + location: Array.isArray(object?.location) + ? object.location.map((e: any) => SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo): unknown { + const obj: any = {}; + if (message.location) { + obj.location = message.location.map((e) => e ? SourceCodeInfo_Location.toJSON(e) : undefined); + } else { + obj.location = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo { + const message = createBaseSourceCodeInfo(); + message.location = object.location?.map((e) => SourceCodeInfo_Location.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSourceCodeInfo_Location(): SourceCodeInfo_Location { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} + +export const SourceCodeInfo_Location = { + encode(message: SourceCodeInfo_Location, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(18).fork(); + for (const v of message.span) { + writer.int32(v); + } + writer.ldelim(); + if (message.leadingComments !== "") { + writer.uint32(26).string(message.leadingComments); + } + if (message.trailingComments !== "") { + writer.uint32(34).string(message.trailingComments); + } + for (const v of message.leadingDetachedComments) { + writer.uint32(50).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo_Location { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo_Location(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.span.push(reader.int32()); + } + } else { + message.span.push(reader.int32()); + } + break; + case 3: + message.leadingComments = reader.string(); + break; + case 4: + message.trailingComments = reader.string(); + break; + case 6: + message.leadingDetachedComments.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo_Location { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + span: Array.isArray(object?.span) ? object.span.map((e: any) => Number(e)) : [], + leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "", + trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "", + leadingDetachedComments: Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e: any) => String(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo_Location): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + if (message.span) { + obj.span = message.span.map((e) => Math.round(e)); + } else { + obj.span = []; + } + message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments); + message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments); + if (message.leadingDetachedComments) { + obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e); + } else { + obj.leadingDetachedComments = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location(); + message.path = object.path?.map((e) => e) || []; + message.span = object.span?.map((e) => e) || []; + message.leadingComments = object.leadingComments ?? ""; + message.trailingComments = object.trailingComments ?? ""; + message.leadingDetachedComments = object.leadingDetachedComments?.map((e) => e) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo(): GeneratedCodeInfo { + return { annotation: [] }; +} + +export const GeneratedCodeInfo = { + encode(message: GeneratedCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.annotation) { + GeneratedCodeInfo_Annotation.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.annotation.push(GeneratedCodeInfo_Annotation.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo { + return { + annotation: Array.isArray(object?.annotation) + ? object.annotation.map((e: any) => GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GeneratedCodeInfo): unknown { + const obj: any = {}; + if (message.annotation) { + obj.annotation = message.annotation.map((e) => e ? GeneratedCodeInfo_Annotation.toJSON(e) : undefined); + } else { + obj.annotation = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo(); + message.annotation = object.annotation?.map((e) => GeneratedCodeInfo_Annotation.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo_Annotation(): GeneratedCodeInfo_Annotation { + return { path: [], sourceFile: "", begin: 0, end: 0 }; +} + +export const GeneratedCodeInfo_Annotation = { + encode(message: GeneratedCodeInfo_Annotation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + if (message.sourceFile !== "") { + writer.uint32(18).string(message.sourceFile); + } + if (message.begin !== 0) { + writer.uint32(24).int32(message.begin); + } + if (message.end !== 0) { + writer.uint32(32).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo_Annotation { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo_Annotation(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + message.sourceFile = reader.string(); + break; + case 3: + message.begin = reader.int32(); + break; + case 4: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo_Annotation { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "", + begin: isSet(object.begin) ? Number(object.begin) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + }; + }, + + toJSON(message: GeneratedCodeInfo_Annotation): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile); + message.begin !== undefined && (obj.begin = Math.round(message.begin)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation(); + message.path = object.path?.map((e) => e) || []; + message.sourceFile = object.sourceFile ?? ""; + message.begin = object.begin ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.upgrade.v1beta1/types/google/protobuf/timestamp.ts b/ts-client/cosmos.upgrade.v1beta1/types/google/protobuf/timestamp.ts new file mode 100644 index 000000000..b00bb1b72 --- /dev/null +++ b/ts-client/cosmos.upgrade.v1beta1/types/google/protobuf/timestamp.ts @@ -0,0 +1,216 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * A Timestamp represents a point in time independent of any time zone or local + * calendar, encoded as a count of seconds and fractions of seconds at + * nanosecond resolution. The count is relative to an epoch at UTC midnight on + * January 1, 1970, in the proleptic Gregorian calendar which extends the + * Gregorian calendar backwards to year one. + * + * All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap + * second table is needed for interpretation, using a [24-hour linear + * smear](https://developers.google.com/time/smear). + * + * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By + * restricting to that range, we ensure that we can convert to and from [RFC + * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + * + * # Examples + * + * Example 1: Compute Timestamp from POSIX `time()`. + * + * Timestamp timestamp; + * timestamp.set_seconds(time(NULL)); + * timestamp.set_nanos(0); + * + * Example 2: Compute Timestamp from POSIX `gettimeofday()`. 
+ * + * struct timeval tv; + * gettimeofday(&tv, NULL); + * + * Timestamp timestamp; + * timestamp.set_seconds(tv.tv_sec); + * timestamp.set_nanos(tv.tv_usec * 1000); + * + * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + * + * FILETIME ft; + * GetSystemTimeAsFileTime(&ft); + * UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + * + * // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + * // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. + * Timestamp timestamp; + * timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + * timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + * + * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + * + * long millis = System.currentTimeMillis(); + * + * Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + * .setNanos((int) ((millis % 1000) * 1000000)).build(); + * + * Example 5: Compute Timestamp from Java `Instant.now()`. + * + * Instant now = Instant.now(); + * + * Timestamp timestamp = + * Timestamp.newBuilder().setSeconds(now.getEpochSecond()) + * .setNanos(now.getNano()).build(); + * + * Example 6: Compute Timestamp from current time in Python. + * + * timestamp = Timestamp() + * timestamp.GetCurrentTime() + * + * # JSON Mapping + * + * In JSON format, the Timestamp type is encoded as a string in the + * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the + * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" + * where {year} is always expressed using four digits while {month}, {day}, + * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional + * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone + * is required. 
A proto3 JSON serializer should always use UTC (as indicated by + * "Z") when printing the Timestamp type and a proto3 JSON parser should be + * able to accept both UTC and other timezones (as indicated by an offset). + * + * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + * 01:30 UTC on January 15, 2017. + * + * In JavaScript, one can convert a Date object to this format using the + * standard + * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) + * method. In Python, a standard `datetime.datetime` object can be converted + * to this format using + * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with + * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use + * the Joda Time's [`ISODateTimeFormat.dateTime()`]( + * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D + * ) to obtain a formatter capable of generating timestamps in this format. + */ +export interface Timestamp { + /** + * Represents seconds of UTC time since Unix epoch + * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + * 9999-12-31T23:59:59Z inclusive. + */ + seconds: number; + /** + * Non-negative fractions of a second at nanosecond resolution. Negative + * second values with fractions must still have non-negative nanos values + * that count forward in time. Must be from 0 to 999,999,999 + * inclusive. 
+ */ + nanos: number; +} + +function createBaseTimestamp(): Timestamp { + return { seconds: 0, nanos: 0 }; +} + +export const Timestamp = { + encode(message: Timestamp, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.seconds !== 0) { + writer.uint32(8).int64(message.seconds); + } + if (message.nanos !== 0) { + writer.uint32(16).int32(message.nanos); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Timestamp { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTimestamp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.seconds = longToNumber(reader.int64() as Long); + break; + case 2: + message.nanos = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Timestamp { + return { + seconds: isSet(object.seconds) ? Number(object.seconds) : 0, + nanos: isSet(object.nanos) ? Number(object.nanos) : 0, + }; + }, + + toJSON(message: Timestamp): unknown { + const obj: any = {}; + message.seconds !== undefined && (obj.seconds = Math.round(message.seconds)); + message.nanos !== undefined && (obj.nanos = Math.round(message.nanos)); + return obj; + }, + + fromPartial, I>>(object: I): Timestamp { + const message = createBaseTimestamp(); + message.seconds = object.seconds ?? 0; + message.nanos = object.nanos ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.vesting.v1beta1/index.ts b/ts-client/cosmos.vesting.v1beta1/index.ts new file mode 100755 index 000000000..c9dfa159e --- /dev/null +++ b/ts-client/cosmos.vesting.v1beta1/index.ts @@ -0,0 +1,6 @@ +import Module from './module'; +import { txClient, queryClient, registry } from './module'; +import { msgTypes } from './registry'; + +export * from "./types"; +export { Module, msgTypes, txClient, queryClient, registry }; diff --git a/ts-client/cosmos.vesting.v1beta1/module.ts b/ts-client/cosmos.vesting.v1beta1/module.ts new file mode 100755 index 000000000..1a77b0457 --- /dev/null +++ b/ts-client/cosmos.vesting.v1beta1/module.ts @@ -0,0 +1,207 @@ +// Generated by Ignite 
ignite.com/cli + +import { StdFee } from "@cosmjs/launchpad"; +import { SigningStargateClient, DeliverTxResponse } from "@cosmjs/stargate"; +import { EncodeObject, GeneratedType, OfflineSigner, Registry } from "@cosmjs/proto-signing"; +import { msgTypes } from './registry'; +import { IgniteClient } from "../client" +import { MissingWalletError } from "../helpers" +import { Api } from "./rest"; +import { MsgCreatePeriodicVestingAccount } from "./types/cosmos/vesting/v1beta1/tx"; +import { MsgCreateVestingAccount } from "./types/cosmos/vesting/v1beta1/tx"; +import { MsgCreatePermanentLockedAccount } from "./types/cosmos/vesting/v1beta1/tx"; + +import { BaseVestingAccount as typeBaseVestingAccount} from "./types" +import { ContinuousVestingAccount as typeContinuousVestingAccount} from "./types" +import { DelayedVestingAccount as typeDelayedVestingAccount} from "./types" +import { Period as typePeriod} from "./types" +import { PeriodicVestingAccount as typePeriodicVestingAccount} from "./types" +import { PermanentLockedAccount as typePermanentLockedAccount} from "./types" + +export { MsgCreatePeriodicVestingAccount, MsgCreateVestingAccount, MsgCreatePermanentLockedAccount }; + +type sendMsgCreatePeriodicVestingAccountParams = { + value: MsgCreatePeriodicVestingAccount, + fee?: StdFee, + memo?: string +}; + +type sendMsgCreateVestingAccountParams = { + value: MsgCreateVestingAccount, + fee?: StdFee, + memo?: string +}; + +type sendMsgCreatePermanentLockedAccountParams = { + value: MsgCreatePermanentLockedAccount, + fee?: StdFee, + memo?: string +}; + + +type msgCreatePeriodicVestingAccountParams = { + value: MsgCreatePeriodicVestingAccount, +}; + +type msgCreateVestingAccountParams = { + value: MsgCreateVestingAccount, +}; + +type msgCreatePermanentLockedAccountParams = { + value: MsgCreatePermanentLockedAccount, +}; + + +export const registry = new Registry(msgTypes); + +type Field = { + name: string; + type: unknown; +} +function getStructure(template) { + const 
structure: {fields: Field[]} = { fields: [] } + for (let [key, value] of Object.entries(template)) { + let field = { name: key, type: typeof value } + structure.fields.push(field) + } + return structure +} +const defaultFee = { + amount: [], + gas: "200000", +}; + +interface TxClientOptions { + addr: string + prefix: string + signer?: OfflineSigner +} + +export const txClient = ({ signer, prefix, addr }: TxClientOptions = { addr: "http://localhost:26657", prefix: "cosmos" }) => { + + return { + + async sendMsgCreatePeriodicVestingAccount({ value, fee, memo }: sendMsgCreatePeriodicVestingAccountParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgCreatePeriodicVestingAccount: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgCreatePeriodicVestingAccount({ value: MsgCreatePeriodicVestingAccount.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgCreatePeriodicVestingAccount: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgCreateVestingAccount({ value, fee, memo }: sendMsgCreateVestingAccountParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgCreateVestingAccount: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgCreateVestingAccount({ value: MsgCreateVestingAccount.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? 
fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgCreateVestingAccount: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgCreatePermanentLockedAccount({ value, fee, memo }: sendMsgCreatePermanentLockedAccountParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgCreatePermanentLockedAccount: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgCreatePermanentLockedAccount({ value: MsgCreatePermanentLockedAccount.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgCreatePermanentLockedAccount: Could not broadcast Tx: '+ e.message) + } + }, + + + msgCreatePeriodicVestingAccount({ value }: msgCreatePeriodicVestingAccountParams): EncodeObject { + try { + return { typeUrl: "/cosmos.vesting.v1beta1.MsgCreatePeriodicVestingAccount", value: MsgCreatePeriodicVestingAccount.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgCreatePeriodicVestingAccount: Could not create message: ' + e.message) + } + }, + + msgCreateVestingAccount({ value }: msgCreateVestingAccountParams): EncodeObject { + try { + return { typeUrl: "/cosmos.vesting.v1beta1.MsgCreateVestingAccount", value: MsgCreateVestingAccount.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgCreateVestingAccount: Could not create message: ' + e.message) + } + }, + + msgCreatePermanentLockedAccount({ value }: msgCreatePermanentLockedAccountParams): EncodeObject { + try { + return { typeUrl: "/cosmos.vesting.v1beta1.MsgCreatePermanentLockedAccount", value: MsgCreatePermanentLockedAccount.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgCreatePermanentLockedAccount: Could not create message: ' + 
e.message) + } + }, + + } +}; + +interface QueryClientOptions { + addr: string +} + +export const queryClient = ({ addr: addr }: QueryClientOptions = { addr: "http://localhost:1317" }) => { + return new Api({ baseURL: addr }); +}; + +class SDKModule { + public query: ReturnType; + public tx: ReturnType; + public structure: Record; + public registry: Array<[string, GeneratedType]> = []; + + constructor(client: IgniteClient) { + + this.query = queryClient({ addr: client.env.apiURL }); + this.updateTX(client); + this.structure = { + BaseVestingAccount: getStructure(typeBaseVestingAccount.fromPartial({})), + ContinuousVestingAccount: getStructure(typeContinuousVestingAccount.fromPartial({})), + DelayedVestingAccount: getStructure(typeDelayedVestingAccount.fromPartial({})), + Period: getStructure(typePeriod.fromPartial({})), + PeriodicVestingAccount: getStructure(typePeriodicVestingAccount.fromPartial({})), + PermanentLockedAccount: getStructure(typePermanentLockedAccount.fromPartial({})), + + }; + client.on('signer-changed',(signer) => { + this.updateTX(client); + }) + } + updateTX(client: IgniteClient) { + const methods = txClient({ + signer: client.signer, + addr: client.env.rpcURL, + prefix: client.env.prefix ?? 
"cosmos", + }) + + this.tx = methods; + for (let m in methods) { + this.tx[m] = methods[m].bind(this.tx); + } + } +}; + +const Module = (test: IgniteClient) => { + return { + module: { + CosmosVestingV1Beta1: new SDKModule(test) + }, + registry: msgTypes + } +} +export default Module; \ No newline at end of file diff --git a/ts-client/cosmos.vesting.v1beta1/registry.ts b/ts-client/cosmos.vesting.v1beta1/registry.ts new file mode 100755 index 000000000..ad4a8121c --- /dev/null +++ b/ts-client/cosmos.vesting.v1beta1/registry.ts @@ -0,0 +1,13 @@ +import { GeneratedType } from "@cosmjs/proto-signing"; +import { MsgCreatePeriodicVestingAccount } from "./types/cosmos/vesting/v1beta1/tx"; +import { MsgCreateVestingAccount } from "./types/cosmos/vesting/v1beta1/tx"; +import { MsgCreatePermanentLockedAccount } from "./types/cosmos/vesting/v1beta1/tx"; + +const msgTypes: Array<[string, GeneratedType]> = [ + ["/cosmos.vesting.v1beta1.MsgCreatePeriodicVestingAccount", MsgCreatePeriodicVestingAccount], + ["/cosmos.vesting.v1beta1.MsgCreateVestingAccount", MsgCreateVestingAccount], + ["/cosmos.vesting.v1beta1.MsgCreatePermanentLockedAccount", MsgCreatePermanentLockedAccount], + +]; + +export { msgTypes } \ No newline at end of file diff --git a/ts-client/cosmos.vesting.v1beta1/rest.ts b/ts-client/cosmos.vesting.v1beta1/rest.ts new file mode 100644 index 000000000..ef66f59d9 --- /dev/null +++ b/ts-client/cosmos.vesting.v1beta1/rest.ts @@ -0,0 +1,300 @@ +/* eslint-disable */ +/* tslint:disable */ +/* + * --------------------------------------------------------------- + * ## THIS FILE WAS GENERATED VIA SWAGGER-TYPESCRIPT-API ## + * ## ## + * ## AUTHOR: acacode ## + * ## SOURCE: https://github.com/acacode/swagger-typescript-api ## + * --------------------------------------------------------------- + */ + +/** +* `Any` contains an arbitrary serialized protocol buffer message along with a +URL that describes the type of the serialized message. 
+ +Protobuf library provides support to pack/unpack Any values in the form +of utility functions or additional generated methods of the Any type. + +Example 1: Pack and unpack a message in C++. + + Foo foo = ...; + Any any; + any.PackFrom(foo); + ... + if (any.UnpackTo(&foo)) { + ... + } + +Example 2: Pack and unpack a message in Java. + + Foo foo = ...; + Any any = Any.pack(foo); + ... + if (any.is(Foo.class)) { + foo = any.unpack(Foo.class); + } + + Example 3: Pack and unpack a message in Python. + + foo = Foo(...) + any = Any() + any.Pack(foo) + ... + if any.Is(Foo.DESCRIPTOR): + any.Unpack(foo) + ... + + Example 4: Pack and unpack a message in Go + + foo := &pb.Foo{...} + any, err := anypb.New(foo) + if err != nil { + ... + } + ... + foo := &pb.Foo{} + if err := any.UnmarshalTo(foo); err != nil { + ... + } + +The pack methods provided by protobuf library will by default use +'type.googleapis.com/full.type.name' as the type URL and the unpack +methods only use the fully qualified type name after the last '/' +in the type URL, for example "foo.bar.com/x/y.z" will yield type +name "y.z". + + +JSON +==== +The JSON representation of an `Any` value uses the regular +representation of the deserialized, embedded message, with an +additional field `@type` which contains the type URL. Example: + + package google.profile; + message Person { + string first_name = 1; + string last_name = 2; + } + + { + "@type": "type.googleapis.com/google.profile.Person", + "firstName": , + "lastName": + } + +If the embedded message type is well-known and has a custom JSON +representation, that representation will be embedded adding a field +`value` which holds the custom JSON in addition to the `@type` +field. 
Example (for message [google.protobuf.Duration][]): + + { + "@type": "type.googleapis.com/google.protobuf.Duration", + "value": "1.212s" + } +*/ +export interface ProtobufAny { + /** + * A URL/resource name that uniquely identifies the type of the serialized + * protocol buffer message. This string must contain at least + * one "/" character. The last segment of the URL's path must represent + * the fully qualified name of the type (as in + * `path/google.protobuf.Duration`). The name should be in a canonical form + * (e.g., leading "." is not accepted). + * + * In practice, teams usually precompile into the binary all types that they + * expect it to use in the context of Any. However, for URLs which use the + * scheme `http`, `https`, or no scheme, one can optionally set up a type + * server that maps type URLs to message definitions as follows: + * * If no scheme is provided, `https` is assumed. + * * An HTTP GET on the URL must yield a [google.protobuf.Type][] + * value in binary format, or produce an error. + * * Applications are allowed to cache lookup results based on the + * URL, or have them precompiled into a binary to avoid any + * lookup. Therefore, binary compatibility needs to be preserved + * on changes to types. (Use versioned type names to manage + * breaking changes.) + * Note: this functionality is not currently available in the official + * protobuf release, and it is not used for type URLs beginning with + * type.googleapis.com. + * Schemes other than `http`, `https` (or the empty scheme) might be + * used with implementation specific semantics. + */ + "@type"?: string; +} + +export interface RpcStatus { + /** @format int32 */ + code?: number; + message?: string; + details?: ProtobufAny[]; +} + +/** +* Coin defines a token with a denomination and an amount. + +NOTE: The amount field is an Int which implements the custom method +signatures required by gogoproto. 
+*/ +export interface V1Beta1Coin { + denom?: string; + amount?: string; +} + +/** +* MsgCreateVestingAccountResponse defines the Msg/CreatePeriodicVestingAccount +response type. + +Since: cosmos-sdk 0.46 +*/ +export type V1Beta1MsgCreatePeriodicVestingAccountResponse = object; + +/** +* MsgCreatePermanentLockedAccountResponse defines the Msg/CreatePermanentLockedAccount response type. + +Since: cosmos-sdk 0.46 +*/ +export type V1Beta1MsgCreatePermanentLockedAccountResponse = object; + +/** + * MsgCreateVestingAccountResponse defines the Msg/CreateVestingAccount response type. + */ +export type V1Beta1MsgCreateVestingAccountResponse = object; + +/** + * Period defines a length of time and amount of coins that will vest. + */ +export interface V1Beta1Period { + /** + * Period duration in seconds. + * @format int64 + */ + length?: string; + amount?: V1Beta1Coin[]; +} + +import axios, { AxiosInstance, AxiosRequestConfig, AxiosResponse, ResponseType } from "axios"; + +export type QueryParamsType = Record; + +export interface FullRequestParams extends Omit { + /** set parameter to `true` for call `securityWorker` for this request */ + secure?: boolean; + /** request path */ + path: string; + /** content type of request body */ + type?: ContentType; + /** query params */ + query?: QueryParamsType; + /** format of response (i.e. 
response.json() -> format: "json") */ + format?: ResponseType; + /** request body */ + body?: unknown; +} + +export type RequestParams = Omit; + +export interface ApiConfig extends Omit { + securityWorker?: ( + securityData: SecurityDataType | null, + ) => Promise | AxiosRequestConfig | void; + secure?: boolean; + format?: ResponseType; +} + +export enum ContentType { + Json = "application/json", + FormData = "multipart/form-data", + UrlEncoded = "application/x-www-form-urlencoded", +} + +export class HttpClient { + public instance: AxiosInstance; + private securityData: SecurityDataType | null = null; + private securityWorker?: ApiConfig["securityWorker"]; + private secure?: boolean; + private format?: ResponseType; + + constructor({ securityWorker, secure, format, ...axiosConfig }: ApiConfig = {}) { + this.instance = axios.create({ ...axiosConfig, baseURL: axiosConfig.baseURL || "" }); + this.secure = secure; + this.format = format; + this.securityWorker = securityWorker; + } + + public setSecurityData = (data: SecurityDataType | null) => { + this.securityData = data; + }; + + private mergeRequestParams(params1: AxiosRequestConfig, params2?: AxiosRequestConfig): AxiosRequestConfig { + return { + ...this.instance.defaults, + ...params1, + ...(params2 || {}), + headers: { + ...(this.instance.defaults.headers || {}), + ...(params1.headers || {}), + ...((params2 && params2.headers) || {}), + }, + }; + } + + private createFormData(input: Record): FormData { + return Object.keys(input || {}).reduce((formData, key) => { + const property = input[key]; + formData.append( + key, + property instanceof Blob + ? property + : typeof property === "object" && property !== null + ? JSON.stringify(property) + : `${property}`, + ); + return formData; + }, new FormData()); + } + + public request = async ({ + secure, + path, + type, + query, + format, + body, + ...params + }: FullRequestParams): Promise> => { + const secureParams = + ((typeof secure === "boolean" ? 
secure : this.secure) && + this.securityWorker && + (await this.securityWorker(this.securityData))) || + {}; + const requestParams = this.mergeRequestParams(params, secureParams); + const responseFormat = (format && this.format) || void 0; + + if (type === ContentType.FormData && body && body !== null && typeof body === "object") { + requestParams.headers.common = { Accept: "*/*" }; + requestParams.headers.post = {}; + requestParams.headers.put = {}; + + body = this.createFormData(body as Record); + } + + return this.instance.request({ + ...requestParams, + headers: { + ...(type && type !== ContentType.FormData ? { "Content-Type": type } : {}), + ...(requestParams.headers || {}), + }, + params: query, + responseType: responseFormat, + data: body, + url: path, + }); + }; +} + +/** + * @title cosmos/vesting/v1beta1/tx.proto + * @version version not set + */ +export class Api extends HttpClient {} diff --git a/ts-client/cosmos.vesting.v1beta1/types.ts b/ts-client/cosmos.vesting.v1beta1/types.ts new file mode 100755 index 000000000..cc45b951d --- /dev/null +++ b/ts-client/cosmos.vesting.v1beta1/types.ts @@ -0,0 +1,17 @@ +import { BaseVestingAccount } from "./types/cosmos/vesting/v1beta1/vesting" +import { ContinuousVestingAccount } from "./types/cosmos/vesting/v1beta1/vesting" +import { DelayedVestingAccount } from "./types/cosmos/vesting/v1beta1/vesting" +import { Period } from "./types/cosmos/vesting/v1beta1/vesting" +import { PeriodicVestingAccount } from "./types/cosmos/vesting/v1beta1/vesting" +import { PermanentLockedAccount } from "./types/cosmos/vesting/v1beta1/vesting" + + +export { + BaseVestingAccount, + ContinuousVestingAccount, + DelayedVestingAccount, + Period, + PeriodicVestingAccount, + PermanentLockedAccount, + + } \ No newline at end of file diff --git a/ts-client/cosmos.vesting.v1beta1/types/amino/amino.ts b/ts-client/cosmos.vesting.v1beta1/types/amino/amino.ts new file mode 100644 index 000000000..4b67dcfe4 --- /dev/null +++ 
b/ts-client/cosmos.vesting.v1beta1/types/amino/amino.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "amino"; diff --git a/ts-client/cosmos.vesting.v1beta1/types/cosmos/auth/v1beta1/auth.ts b/ts-client/cosmos.vesting.v1beta1/types/cosmos/auth/v1beta1/auth.ts new file mode 100644 index 000000000..a37ece488 --- /dev/null +++ b/ts-client/cosmos.vesting.v1beta1/types/cosmos/auth/v1beta1/auth.ts @@ -0,0 +1,428 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { Any } from "../../../google/protobuf/any"; + +export const protobufPackage = "cosmos.auth.v1beta1"; + +/** + * BaseAccount defines a base account type. It contains all the necessary fields + * for basic account functionality. Any custom account type should extend this + * type for additional functionality (e.g. vesting). + */ +export interface BaseAccount { + address: string; + pubKey: Any | undefined; + accountNumber: number; + sequence: number; +} + +/** ModuleAccount defines an account for modules that holds coins on a pool. */ +export interface ModuleAccount { + baseAccount: BaseAccount | undefined; + name: string; + permissions: string[]; +} + +/** + * ModuleCredential represents a unclaimable pubkey for base accounts controlled by modules. + * + * Since: cosmos-sdk 0.47 + */ +export interface ModuleCredential { + /** module_name is the name of the module used for address derivation (passed into address.Module). */ + moduleName: string; + /** + * derivation_keys is for deriving a module account address (passed into address.Module) + * adding more keys creates sub-account addresses (passed into address.Derive) + */ + derivationKeys: Uint8Array[]; +} + +/** Params defines the parameters for the auth module. 
*/ +export interface Params { + maxMemoCharacters: number; + txSigLimit: number; + txSizeCostPerByte: number; + sigVerifyCostEd25519: number; + sigVerifyCostSecp256k1: number; +} + +function createBaseBaseAccount(): BaseAccount { + return { address: "", pubKey: undefined, accountNumber: 0, sequence: 0 }; +} + +export const BaseAccount = { + encode(message: BaseAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + if (message.pubKey !== undefined) { + Any.encode(message.pubKey, writer.uint32(18).fork()).ldelim(); + } + if (message.accountNumber !== 0) { + writer.uint32(24).uint64(message.accountNumber); + } + if (message.sequence !== 0) { + writer.uint32(32).uint64(message.sequence); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BaseAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBaseAccount(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + case 2: + message.pubKey = Any.decode(reader, reader.uint32()); + break; + case 3: + message.accountNumber = longToNumber(reader.uint64() as Long); + break; + case 4: + message.sequence = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): BaseAccount { + return { + address: isSet(object.address) ? String(object.address) : "", + pubKey: isSet(object.pubKey) ? Any.fromJSON(object.pubKey) : undefined, + accountNumber: isSet(object.accountNumber) ? Number(object.accountNumber) : 0, + sequence: isSet(object.sequence) ? 
Number(object.sequence) : 0, + }; + }, + + toJSON(message: BaseAccount): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + message.pubKey !== undefined && (obj.pubKey = message.pubKey ? Any.toJSON(message.pubKey) : undefined); + message.accountNumber !== undefined && (obj.accountNumber = Math.round(message.accountNumber)); + message.sequence !== undefined && (obj.sequence = Math.round(message.sequence)); + return obj; + }, + + fromPartial, I>>(object: I): BaseAccount { + const message = createBaseBaseAccount(); + message.address = object.address ?? ""; + message.pubKey = (object.pubKey !== undefined && object.pubKey !== null) + ? Any.fromPartial(object.pubKey) + : undefined; + message.accountNumber = object.accountNumber ?? 0; + message.sequence = object.sequence ?? 0; + return message; + }, +}; + +function createBaseModuleAccount(): ModuleAccount { + return { baseAccount: undefined, name: "", permissions: [] }; +} + +export const ModuleAccount = { + encode(message: ModuleAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.baseAccount !== undefined) { + BaseAccount.encode(message.baseAccount, writer.uint32(10).fork()).ldelim(); + } + if (message.name !== "") { + writer.uint32(18).string(message.name); + } + for (const v of message.permissions) { + writer.uint32(26).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ModuleAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseModuleAccount(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.baseAccount = BaseAccount.decode(reader, reader.uint32()); + break; + case 2: + message.name = reader.string(); + break; + case 3: + message.permissions.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ModuleAccount { + return { + baseAccount: isSet(object.baseAccount) ? BaseAccount.fromJSON(object.baseAccount) : undefined, + name: isSet(object.name) ? String(object.name) : "", + permissions: Array.isArray(object?.permissions) ? object.permissions.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: ModuleAccount): unknown { + const obj: any = {}; + message.baseAccount !== undefined + && (obj.baseAccount = message.baseAccount ? BaseAccount.toJSON(message.baseAccount) : undefined); + message.name !== undefined && (obj.name = message.name); + if (message.permissions) { + obj.permissions = message.permissions.map((e) => e); + } else { + obj.permissions = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ModuleAccount { + const message = createBaseModuleAccount(); + message.baseAccount = (object.baseAccount !== undefined && object.baseAccount !== null) + ? BaseAccount.fromPartial(object.baseAccount) + : undefined; + message.name = object.name ?? 
""; + message.permissions = object.permissions?.map((e) => e) || []; + return message; + }, +}; + +function createBaseModuleCredential(): ModuleCredential { + return { moduleName: "", derivationKeys: [] }; +} + +export const ModuleCredential = { + encode(message: ModuleCredential, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.moduleName !== "") { + writer.uint32(10).string(message.moduleName); + } + for (const v of message.derivationKeys) { + writer.uint32(18).bytes(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ModuleCredential { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseModuleCredential(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.moduleName = reader.string(); + break; + case 2: + message.derivationKeys.push(reader.bytes()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ModuleCredential { + return { + moduleName: isSet(object.moduleName) ? String(object.moduleName) : "", + derivationKeys: Array.isArray(object?.derivationKeys) + ? object.derivationKeys.map((e: any) => bytesFromBase64(e)) + : [], + }; + }, + + toJSON(message: ModuleCredential): unknown { + const obj: any = {}; + message.moduleName !== undefined && (obj.moduleName = message.moduleName); + if (message.derivationKeys) { + obj.derivationKeys = message.derivationKeys.map((e) => base64FromBytes(e !== undefined ? e : new Uint8Array())); + } else { + obj.derivationKeys = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ModuleCredential { + const message = createBaseModuleCredential(); + message.moduleName = object.moduleName ?? 
""; + message.derivationKeys = object.derivationKeys?.map((e) => e) || []; + return message; + }, +}; + +function createBaseParams(): Params { + return { + maxMemoCharacters: 0, + txSigLimit: 0, + txSizeCostPerByte: 0, + sigVerifyCostEd25519: 0, + sigVerifyCostSecp256k1: 0, + }; +} + +export const Params = { + encode(message: Params, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.maxMemoCharacters !== 0) { + writer.uint32(8).uint64(message.maxMemoCharacters); + } + if (message.txSigLimit !== 0) { + writer.uint32(16).uint64(message.txSigLimit); + } + if (message.txSizeCostPerByte !== 0) { + writer.uint32(24).uint64(message.txSizeCostPerByte); + } + if (message.sigVerifyCostEd25519 !== 0) { + writer.uint32(32).uint64(message.sigVerifyCostEd25519); + } + if (message.sigVerifyCostSecp256k1 !== 0) { + writer.uint32(40).uint64(message.sigVerifyCostSecp256k1); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Params { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseParams(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.maxMemoCharacters = longToNumber(reader.uint64() as Long); + break; + case 2: + message.txSigLimit = longToNumber(reader.uint64() as Long); + break; + case 3: + message.txSizeCostPerByte = longToNumber(reader.uint64() as Long); + break; + case 4: + message.sigVerifyCostEd25519 = longToNumber(reader.uint64() as Long); + break; + case 5: + message.sigVerifyCostSecp256k1 = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Params { + return { + maxMemoCharacters: isSet(object.maxMemoCharacters) ? Number(object.maxMemoCharacters) : 0, + txSigLimit: isSet(object.txSigLimit) ? 
Number(object.txSigLimit) : 0, + txSizeCostPerByte: isSet(object.txSizeCostPerByte) ? Number(object.txSizeCostPerByte) : 0, + sigVerifyCostEd25519: isSet(object.sigVerifyCostEd25519) ? Number(object.sigVerifyCostEd25519) : 0, + sigVerifyCostSecp256k1: isSet(object.sigVerifyCostSecp256k1) ? Number(object.sigVerifyCostSecp256k1) : 0, + }; + }, + + toJSON(message: Params): unknown { + const obj: any = {}; + message.maxMemoCharacters !== undefined && (obj.maxMemoCharacters = Math.round(message.maxMemoCharacters)); + message.txSigLimit !== undefined && (obj.txSigLimit = Math.round(message.txSigLimit)); + message.txSizeCostPerByte !== undefined && (obj.txSizeCostPerByte = Math.round(message.txSizeCostPerByte)); + message.sigVerifyCostEd25519 !== undefined && (obj.sigVerifyCostEd25519 = Math.round(message.sigVerifyCostEd25519)); + message.sigVerifyCostSecp256k1 !== undefined + && (obj.sigVerifyCostSecp256k1 = Math.round(message.sigVerifyCostSecp256k1)); + return obj; + }, + + fromPartial, I>>(object: I): Params { + const message = createBaseParams(); + message.maxMemoCharacters = object.maxMemoCharacters ?? 0; + message.txSigLimit = object.txSigLimit ?? 0; + message.txSizeCostPerByte = object.txSizeCostPerByte ?? 0; + message.sigVerifyCostEd25519 = object.sigVerifyCostEd25519 ?? 0; + message.sigVerifyCostSecp256k1 = object.sigVerifyCostSecp256k1 ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.vesting.v1beta1/types/cosmos/base/v1beta1/coin.ts b/ts-client/cosmos.vesting.v1beta1/types/cosmos/base/v1beta1/coin.ts new file mode 100644 index 000000000..b28eec03f --- /dev/null +++ b/ts-client/cosmos.vesting.v1beta1/types/cosmos/base/v1beta1/coin.ts @@ -0,0 +1,261 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos.base.v1beta1"; + +/** + * Coin defines a token with a denomination and an amount. + * + * NOTE: The amount field is an Int which implements the custom method + * signatures required by gogoproto. + */ +export interface Coin { + denom: string; + amount: string; +} + +/** + * DecCoin defines a token with a denomination and a decimal amount. + * + * NOTE: The amount field is an Dec which implements the custom method + * signatures required by gogoproto. + */ +export interface DecCoin { + denom: string; + amount: string; +} + +/** IntProto defines a Protobuf wrapper around an Int object. */ +export interface IntProto { + int: string; +} + +/** DecProto defines a Protobuf wrapper around a Dec object. 
*/ +export interface DecProto { + dec: string; +} + +function createBaseCoin(): Coin { + return { denom: "", amount: "" }; +} + +export const Coin = { + encode(message: Coin, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.denom !== "") { + writer.uint32(10).string(message.denom); + } + if (message.amount !== "") { + writer.uint32(18).string(message.amount); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Coin { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCoin(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.denom = reader.string(); + break; + case 2: + message.amount = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Coin { + return { + denom: isSet(object.denom) ? String(object.denom) : "", + amount: isSet(object.amount) ? String(object.amount) : "", + }; + }, + + toJSON(message: Coin): unknown { + const obj: any = {}; + message.denom !== undefined && (obj.denom = message.denom); + message.amount !== undefined && (obj.amount = message.amount); + return obj; + }, + + fromPartial, I>>(object: I): Coin { + const message = createBaseCoin(); + message.denom = object.denom ?? ""; + message.amount = object.amount ?? ""; + return message; + }, +}; + +function createBaseDecCoin(): DecCoin { + return { denom: "", amount: "" }; +} + +export const DecCoin = { + encode(message: DecCoin, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.denom !== "") { + writer.uint32(10).string(message.denom); + } + if (message.amount !== "") { + writer.uint32(18).string(message.amount); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DecCoin { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDecCoin(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.denom = reader.string(); + break; + case 2: + message.amount = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DecCoin { + return { + denom: isSet(object.denom) ? String(object.denom) : "", + amount: isSet(object.amount) ? String(object.amount) : "", + }; + }, + + toJSON(message: DecCoin): unknown { + const obj: any = {}; + message.denom !== undefined && (obj.denom = message.denom); + message.amount !== undefined && (obj.amount = message.amount); + return obj; + }, + + fromPartial, I>>(object: I): DecCoin { + const message = createBaseDecCoin(); + message.denom = object.denom ?? ""; + message.amount = object.amount ?? ""; + return message; + }, +}; + +function createBaseIntProto(): IntProto { + return { int: "" }; +} + +export const IntProto = { + encode(message: IntProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.int !== "") { + writer.uint32(10).string(message.int); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): IntProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseIntProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.int = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): IntProto { + return { int: isSet(object.int) ? 
String(object.int) : "" }; + }, + + toJSON(message: IntProto): unknown { + const obj: any = {}; + message.int !== undefined && (obj.int = message.int); + return obj; + }, + + fromPartial, I>>(object: I): IntProto { + const message = createBaseIntProto(); + message.int = object.int ?? ""; + return message; + }, +}; + +function createBaseDecProto(): DecProto { + return { dec: "" }; +} + +export const DecProto = { + encode(message: DecProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.dec !== "") { + writer.uint32(10).string(message.dec); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DecProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDecProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.dec = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DecProto { + return { dec: isSet(object.dec) ? String(object.dec) : "" }; + }, + + toJSON(message: DecProto): unknown { + const obj: any = {}; + message.dec !== undefined && (obj.dec = message.dec); + return obj; + }, + + fromPartial, I>>(object: I): DecProto { + const message = createBaseDecProto(); + message.dec = object.dec ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.vesting.v1beta1/types/cosmos/msg/v1/msg.ts b/ts-client/cosmos.vesting.v1beta1/types/cosmos/msg/v1/msg.ts new file mode 100644 index 000000000..f0ff44eb2 --- /dev/null +++ b/ts-client/cosmos.vesting.v1beta1/types/cosmos/msg/v1/msg.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "cosmos.msg.v1"; diff --git a/ts-client/cosmos.vesting.v1beta1/types/cosmos/vesting/v1beta1/tx.ts b/ts-client/cosmos.vesting.v1beta1/types/cosmos/vesting/v1beta1/tx.ts new file mode 100644 index 000000000..c58f36047 --- /dev/null +++ b/ts-client/cosmos.vesting.v1beta1/types/cosmos/vesting/v1beta1/tx.ts @@ -0,0 +1,542 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { Coin } from "../../base/v1beta1/coin"; +import { Period } from "./vesting"; + +export const protobufPackage = "cosmos.vesting.v1beta1"; + +/** + * MsgCreateVestingAccount defines a message that enables creating a vesting + * account. + */ +export interface MsgCreateVestingAccount { + fromAddress: string; + toAddress: string; + amount: Coin[]; + /** end of vesting as unix time (in seconds). */ + endTime: number; + delayed: boolean; +} + +/** MsgCreateVestingAccountResponse defines the Msg/CreateVestingAccount response type. */ +export interface MsgCreateVestingAccountResponse { +} + +/** + * MsgCreatePermanentLockedAccount defines a message that enables creating a permanent + * locked account. + * + * Since: cosmos-sdk 0.46 + */ +export interface MsgCreatePermanentLockedAccount { + fromAddress: string; + toAddress: string; + amount: Coin[]; +} + +/** + * MsgCreatePermanentLockedAccountResponse defines the Msg/CreatePermanentLockedAccount response type. 
+ * + * Since: cosmos-sdk 0.46 + */ +export interface MsgCreatePermanentLockedAccountResponse { +} + +/** + * MsgCreateVestingAccount defines a message that enables creating a vesting + * account. + * + * Since: cosmos-sdk 0.46 + */ +export interface MsgCreatePeriodicVestingAccount { + fromAddress: string; + toAddress: string; + /** start of vesting as unix time (in seconds). */ + startTime: number; + vestingPeriods: Period[]; +} + +/** + * MsgCreateVestingAccountResponse defines the Msg/CreatePeriodicVestingAccount + * response type. + * + * Since: cosmos-sdk 0.46 + */ +export interface MsgCreatePeriodicVestingAccountResponse { +} + +function createBaseMsgCreateVestingAccount(): MsgCreateVestingAccount { + return { fromAddress: "", toAddress: "", amount: [], endTime: 0, delayed: false }; +} + +export const MsgCreateVestingAccount = { + encode(message: MsgCreateVestingAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.fromAddress !== "") { + writer.uint32(10).string(message.fromAddress); + } + if (message.toAddress !== "") { + writer.uint32(18).string(message.toAddress); + } + for (const v of message.amount) { + Coin.encode(v!, writer.uint32(26).fork()).ldelim(); + } + if (message.endTime !== 0) { + writer.uint32(32).int64(message.endTime); + } + if (message.delayed === true) { + writer.uint32(40).bool(message.delayed); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateVestingAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgCreateVestingAccount(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.fromAddress = reader.string(); + break; + case 2: + message.toAddress = reader.string(); + break; + case 3: + message.amount.push(Coin.decode(reader, reader.uint32())); + break; + case 4: + message.endTime = longToNumber(reader.int64() as Long); + break; + case 5: + message.delayed = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgCreateVestingAccount { + return { + fromAddress: isSet(object.fromAddress) ? String(object.fromAddress) : "", + toAddress: isSet(object.toAddress) ? String(object.toAddress) : "", + amount: Array.isArray(object?.amount) ? object.amount.map((e: any) => Coin.fromJSON(e)) : [], + endTime: isSet(object.endTime) ? Number(object.endTime) : 0, + delayed: isSet(object.delayed) ? Boolean(object.delayed) : false, + }; + }, + + toJSON(message: MsgCreateVestingAccount): unknown { + const obj: any = {}; + message.fromAddress !== undefined && (obj.fromAddress = message.fromAddress); + message.toAddress !== undefined && (obj.toAddress = message.toAddress); + if (message.amount) { + obj.amount = message.amount.map((e) => e ? Coin.toJSON(e) : undefined); + } else { + obj.amount = []; + } + message.endTime !== undefined && (obj.endTime = Math.round(message.endTime)); + message.delayed !== undefined && (obj.delayed = message.delayed); + return obj; + }, + + fromPartial, I>>(object: I): MsgCreateVestingAccount { + const message = createBaseMsgCreateVestingAccount(); + message.fromAddress = object.fromAddress ?? ""; + message.toAddress = object.toAddress ?? ""; + message.amount = object.amount?.map((e) => Coin.fromPartial(e)) || []; + message.endTime = object.endTime ?? 0; + message.delayed = object.delayed ?? 
false; + return message; + }, +}; + +function createBaseMsgCreateVestingAccountResponse(): MsgCreateVestingAccountResponse { + return {}; +} + +export const MsgCreateVestingAccountResponse = { + encode(_: MsgCreateVestingAccountResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateVestingAccountResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCreateVestingAccountResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgCreateVestingAccountResponse { + return {}; + }, + + toJSON(_: MsgCreateVestingAccountResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgCreateVestingAccountResponse { + const message = createBaseMsgCreateVestingAccountResponse(); + return message; + }, +}; + +function createBaseMsgCreatePermanentLockedAccount(): MsgCreatePermanentLockedAccount { + return { fromAddress: "", toAddress: "", amount: [] }; +} + +export const MsgCreatePermanentLockedAccount = { + encode(message: MsgCreatePermanentLockedAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.fromAddress !== "") { + writer.uint32(10).string(message.fromAddress); + } + if (message.toAddress !== "") { + writer.uint32(18).string(message.toAddress); + } + for (const v of message.amount) { + Coin.encode(v!, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreatePermanentLockedAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgCreatePermanentLockedAccount(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.fromAddress = reader.string(); + break; + case 2: + message.toAddress = reader.string(); + break; + case 3: + message.amount.push(Coin.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgCreatePermanentLockedAccount { + return { + fromAddress: isSet(object.fromAddress) ? String(object.fromAddress) : "", + toAddress: isSet(object.toAddress) ? String(object.toAddress) : "", + amount: Array.isArray(object?.amount) ? object.amount.map((e: any) => Coin.fromJSON(e)) : [], + }; + }, + + toJSON(message: MsgCreatePermanentLockedAccount): unknown { + const obj: any = {}; + message.fromAddress !== undefined && (obj.fromAddress = message.fromAddress); + message.toAddress !== undefined && (obj.toAddress = message.toAddress); + if (message.amount) { + obj.amount = message.amount.map((e) => e ? Coin.toJSON(e) : undefined); + } else { + obj.amount = []; + } + return obj; + }, + + fromPartial, I>>( + object: I, + ): MsgCreatePermanentLockedAccount { + const message = createBaseMsgCreatePermanentLockedAccount(); + message.fromAddress = object.fromAddress ?? ""; + message.toAddress = object.toAddress ?? ""; + message.amount = object.amount?.map((e) => Coin.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMsgCreatePermanentLockedAccountResponse(): MsgCreatePermanentLockedAccountResponse { + return {}; +} + +export const MsgCreatePermanentLockedAccountResponse = { + encode(_: MsgCreatePermanentLockedAccountResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreatePermanentLockedAccountResponse { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCreatePermanentLockedAccountResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgCreatePermanentLockedAccountResponse { + return {}; + }, + + toJSON(_: MsgCreatePermanentLockedAccountResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>( + _: I, + ): MsgCreatePermanentLockedAccountResponse { + const message = createBaseMsgCreatePermanentLockedAccountResponse(); + return message; + }, +}; + +function createBaseMsgCreatePeriodicVestingAccount(): MsgCreatePeriodicVestingAccount { + return { fromAddress: "", toAddress: "", startTime: 0, vestingPeriods: [] }; +} + +export const MsgCreatePeriodicVestingAccount = { + encode(message: MsgCreatePeriodicVestingAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.fromAddress !== "") { + writer.uint32(10).string(message.fromAddress); + } + if (message.toAddress !== "") { + writer.uint32(18).string(message.toAddress); + } + if (message.startTime !== 0) { + writer.uint32(24).int64(message.startTime); + } + for (const v of message.vestingPeriods) { + Period.encode(v!, writer.uint32(34).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreatePeriodicVestingAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgCreatePeriodicVestingAccount(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.fromAddress = reader.string(); + break; + case 2: + message.toAddress = reader.string(); + break; + case 3: + message.startTime = longToNumber(reader.int64() as Long); + break; + case 4: + message.vestingPeriods.push(Period.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgCreatePeriodicVestingAccount { + return { + fromAddress: isSet(object.fromAddress) ? String(object.fromAddress) : "", + toAddress: isSet(object.toAddress) ? String(object.toAddress) : "", + startTime: isSet(object.startTime) ? Number(object.startTime) : 0, + vestingPeriods: Array.isArray(object?.vestingPeriods) + ? object.vestingPeriods.map((e: any) => Period.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MsgCreatePeriodicVestingAccount): unknown { + const obj: any = {}; + message.fromAddress !== undefined && (obj.fromAddress = message.fromAddress); + message.toAddress !== undefined && (obj.toAddress = message.toAddress); + message.startTime !== undefined && (obj.startTime = Math.round(message.startTime)); + if (message.vestingPeriods) { + obj.vestingPeriods = message.vestingPeriods.map((e) => e ? Period.toJSON(e) : undefined); + } else { + obj.vestingPeriods = []; + } + return obj; + }, + + fromPartial, I>>( + object: I, + ): MsgCreatePeriodicVestingAccount { + const message = createBaseMsgCreatePeriodicVestingAccount(); + message.fromAddress = object.fromAddress ?? ""; + message.toAddress = object.toAddress ?? ""; + message.startTime = object.startTime ?? 
0; + message.vestingPeriods = object.vestingPeriods?.map((e) => Period.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMsgCreatePeriodicVestingAccountResponse(): MsgCreatePeriodicVestingAccountResponse { + return {}; +} + +export const MsgCreatePeriodicVestingAccountResponse = { + encode(_: MsgCreatePeriodicVestingAccountResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreatePeriodicVestingAccountResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCreatePeriodicVestingAccountResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgCreatePeriodicVestingAccountResponse { + return {}; + }, + + toJSON(_: MsgCreatePeriodicVestingAccountResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>( + _: I, + ): MsgCreatePeriodicVestingAccountResponse { + const message = createBaseMsgCreatePeriodicVestingAccountResponse(); + return message; + }, +}; + +/** Msg defines the bank Msg service. */ +export interface Msg { + /** + * CreateVestingAccount defines a method that enables creating a vesting + * account. + */ + CreateVestingAccount(request: MsgCreateVestingAccount): Promise; + /** + * CreatePermanentLockedAccount defines a method that enables creating a permanent + * locked account. + * + * Since: cosmos-sdk 0.46 + */ + CreatePermanentLockedAccount( + request: MsgCreatePermanentLockedAccount, + ): Promise; + /** + * CreatePeriodicVestingAccount defines a method that enables creating a + * periodic vesting account. 
+ * + * Since: cosmos-sdk 0.46 + */ + CreatePeriodicVestingAccount( + request: MsgCreatePeriodicVestingAccount, + ): Promise; +} + +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.CreateVestingAccount = this.CreateVestingAccount.bind(this); + this.CreatePermanentLockedAccount = this.CreatePermanentLockedAccount.bind(this); + this.CreatePeriodicVestingAccount = this.CreatePeriodicVestingAccount.bind(this); + } + CreateVestingAccount(request: MsgCreateVestingAccount): Promise { + const data = MsgCreateVestingAccount.encode(request).finish(); + const promise = this.rpc.request("cosmos.vesting.v1beta1.Msg", "CreateVestingAccount", data); + return promise.then((data) => MsgCreateVestingAccountResponse.decode(new _m0.Reader(data))); + } + + CreatePermanentLockedAccount( + request: MsgCreatePermanentLockedAccount, + ): Promise { + const data = MsgCreatePermanentLockedAccount.encode(request).finish(); + const promise = this.rpc.request("cosmos.vesting.v1beta1.Msg", "CreatePermanentLockedAccount", data); + return promise.then((data) => MsgCreatePermanentLockedAccountResponse.decode(new _m0.Reader(data))); + } + + CreatePeriodicVestingAccount( + request: MsgCreatePeriodicVestingAccount, + ): Promise { + const data = MsgCreatePeriodicVestingAccount.encode(request).finish(); + const promise = this.rpc.request("cosmos.vesting.v1beta1.Msg", "CreatePeriodicVestingAccount", data); + return promise.then((data) => MsgCreatePeriodicVestingAccountResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + 
if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.vesting.v1beta1/types/cosmos/vesting/v1beta1/vesting.ts b/ts-client/cosmos.vesting.v1beta1/types/cosmos/vesting/v1beta1/vesting.ts new file mode 100644 index 000000000..77b7bcc19 --- /dev/null +++ b/ts-client/cosmos.vesting.v1beta1/types/cosmos/vesting/v1beta1/vesting.ts @@ -0,0 +1,534 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { BaseAccount } from "../../auth/v1beta1/auth"; +import { Coin } from "../../base/v1beta1/coin"; + +export const protobufPackage = "cosmos.vesting.v1beta1"; + +/** + * BaseVestingAccount implements the VestingAccount interface. It contains all + * the necessary fields needed for any vesting account implementation. + */ +export interface BaseVestingAccount { + baseAccount: BaseAccount | undefined; + originalVesting: Coin[]; + delegatedFree: Coin[]; + delegatedVesting: Coin[]; + /** Vesting end time, as unix timestamp (in seconds). */ + endTime: number; +} + +/** + * ContinuousVestingAccount implements the VestingAccount interface. 
It + * continuously vests by unlocking coins linearly with respect to time. + */ +export interface ContinuousVestingAccount { + baseVestingAccount: + | BaseVestingAccount + | undefined; + /** Vesting start time, as unix timestamp (in seconds). */ + startTime: number; +} + +/** + * DelayedVestingAccount implements the VestingAccount interface. It vests all + * coins after a specific time, but non prior. In other words, it keeps them + * locked until a specified time. + */ +export interface DelayedVestingAccount { + baseVestingAccount: BaseVestingAccount | undefined; +} + +/** Period defines a length of time and amount of coins that will vest. */ +export interface Period { + /** Period duration in seconds. */ + length: number; + amount: Coin[]; +} + +/** + * PeriodicVestingAccount implements the VestingAccount interface. It + * periodically vests by unlocking coins during each specified period. + */ +export interface PeriodicVestingAccount { + baseVestingAccount: BaseVestingAccount | undefined; + startTime: number; + vestingPeriods: Period[]; +} + +/** + * PermanentLockedAccount implements the VestingAccount interface. It does + * not ever release coins, locking them indefinitely. Coins in this account can + * still be used for delegating and for governance votes even while locked. 
+ * + * Since: cosmos-sdk 0.43 + */ +export interface PermanentLockedAccount { + baseVestingAccount: BaseVestingAccount | undefined; +} + +function createBaseBaseVestingAccount(): BaseVestingAccount { + return { baseAccount: undefined, originalVesting: [], delegatedFree: [], delegatedVesting: [], endTime: 0 }; +} + +export const BaseVestingAccount = { + encode(message: BaseVestingAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.baseAccount !== undefined) { + BaseAccount.encode(message.baseAccount, writer.uint32(10).fork()).ldelim(); + } + for (const v of message.originalVesting) { + Coin.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.delegatedFree) { + Coin.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.delegatedVesting) { + Coin.encode(v!, writer.uint32(34).fork()).ldelim(); + } + if (message.endTime !== 0) { + writer.uint32(40).int64(message.endTime); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BaseVestingAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBaseVestingAccount(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.baseAccount = BaseAccount.decode(reader, reader.uint32()); + break; + case 2: + message.originalVesting.push(Coin.decode(reader, reader.uint32())); + break; + case 3: + message.delegatedFree.push(Coin.decode(reader, reader.uint32())); + break; + case 4: + message.delegatedVesting.push(Coin.decode(reader, reader.uint32())); + break; + case 5: + message.endTime = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): BaseVestingAccount { + return { + baseAccount: isSet(object.baseAccount) ? 
BaseAccount.fromJSON(object.baseAccount) : undefined, + originalVesting: Array.isArray(object?.originalVesting) + ? object.originalVesting.map((e: any) => Coin.fromJSON(e)) + : [], + delegatedFree: Array.isArray(object?.delegatedFree) ? object.delegatedFree.map((e: any) => Coin.fromJSON(e)) : [], + delegatedVesting: Array.isArray(object?.delegatedVesting) + ? object.delegatedVesting.map((e: any) => Coin.fromJSON(e)) + : [], + endTime: isSet(object.endTime) ? Number(object.endTime) : 0, + }; + }, + + toJSON(message: BaseVestingAccount): unknown { + const obj: any = {}; + message.baseAccount !== undefined + && (obj.baseAccount = message.baseAccount ? BaseAccount.toJSON(message.baseAccount) : undefined); + if (message.originalVesting) { + obj.originalVesting = message.originalVesting.map((e) => e ? Coin.toJSON(e) : undefined); + } else { + obj.originalVesting = []; + } + if (message.delegatedFree) { + obj.delegatedFree = message.delegatedFree.map((e) => e ? Coin.toJSON(e) : undefined); + } else { + obj.delegatedFree = []; + } + if (message.delegatedVesting) { + obj.delegatedVesting = message.delegatedVesting.map((e) => e ? Coin.toJSON(e) : undefined); + } else { + obj.delegatedVesting = []; + } + message.endTime !== undefined && (obj.endTime = Math.round(message.endTime)); + return obj; + }, + + fromPartial, I>>(object: I): BaseVestingAccount { + const message = createBaseBaseVestingAccount(); + message.baseAccount = (object.baseAccount !== undefined && object.baseAccount !== null) + ? BaseAccount.fromPartial(object.baseAccount) + : undefined; + message.originalVesting = object.originalVesting?.map((e) => Coin.fromPartial(e)) || []; + message.delegatedFree = object.delegatedFree?.map((e) => Coin.fromPartial(e)) || []; + message.delegatedVesting = object.delegatedVesting?.map((e) => Coin.fromPartial(e)) || []; + message.endTime = object.endTime ?? 
0; + return message; + }, +}; + +function createBaseContinuousVestingAccount(): ContinuousVestingAccount { + return { baseVestingAccount: undefined, startTime: 0 }; +} + +export const ContinuousVestingAccount = { + encode(message: ContinuousVestingAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.baseVestingAccount !== undefined) { + BaseVestingAccount.encode(message.baseVestingAccount, writer.uint32(10).fork()).ldelim(); + } + if (message.startTime !== 0) { + writer.uint32(16).int64(message.startTime); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ContinuousVestingAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseContinuousVestingAccount(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.baseVestingAccount = BaseVestingAccount.decode(reader, reader.uint32()); + break; + case 2: + message.startTime = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ContinuousVestingAccount { + return { + baseVestingAccount: isSet(object.baseVestingAccount) + ? BaseVestingAccount.fromJSON(object.baseVestingAccount) + : undefined, + startTime: isSet(object.startTime) ? Number(object.startTime) : 0, + }; + }, + + toJSON(message: ContinuousVestingAccount): unknown { + const obj: any = {}; + message.baseVestingAccount !== undefined && (obj.baseVestingAccount = message.baseVestingAccount + ? 
BaseVestingAccount.toJSON(message.baseVestingAccount) + : undefined); + message.startTime !== undefined && (obj.startTime = Math.round(message.startTime)); + return obj; + }, + + fromPartial, I>>(object: I): ContinuousVestingAccount { + const message = createBaseContinuousVestingAccount(); + message.baseVestingAccount = (object.baseVestingAccount !== undefined && object.baseVestingAccount !== null) + ? BaseVestingAccount.fromPartial(object.baseVestingAccount) + : undefined; + message.startTime = object.startTime ?? 0; + return message; + }, +}; + +function createBaseDelayedVestingAccount(): DelayedVestingAccount { + return { baseVestingAccount: undefined }; +} + +export const DelayedVestingAccount = { + encode(message: DelayedVestingAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.baseVestingAccount !== undefined) { + BaseVestingAccount.encode(message.baseVestingAccount, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DelayedVestingAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDelayedVestingAccount(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.baseVestingAccount = BaseVestingAccount.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DelayedVestingAccount { + return { + baseVestingAccount: isSet(object.baseVestingAccount) + ? BaseVestingAccount.fromJSON(object.baseVestingAccount) + : undefined, + }; + }, + + toJSON(message: DelayedVestingAccount): unknown { + const obj: any = {}; + message.baseVestingAccount !== undefined && (obj.baseVestingAccount = message.baseVestingAccount + ? 
BaseVestingAccount.toJSON(message.baseVestingAccount) + : undefined); + return obj; + }, + + fromPartial, I>>(object: I): DelayedVestingAccount { + const message = createBaseDelayedVestingAccount(); + message.baseVestingAccount = (object.baseVestingAccount !== undefined && object.baseVestingAccount !== null) + ? BaseVestingAccount.fromPartial(object.baseVestingAccount) + : undefined; + return message; + }, +}; + +function createBasePeriod(): Period { + return { length: 0, amount: [] }; +} + +export const Period = { + encode(message: Period, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.length !== 0) { + writer.uint32(8).int64(message.length); + } + for (const v of message.amount) { + Coin.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Period { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePeriod(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.length = longToNumber(reader.int64() as Long); + break; + case 2: + message.amount.push(Coin.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Period { + return { + length: isSet(object.length) ? Number(object.length) : 0, + amount: Array.isArray(object?.amount) ? object.amount.map((e: any) => Coin.fromJSON(e)) : [], + }; + }, + + toJSON(message: Period): unknown { + const obj: any = {}; + message.length !== undefined && (obj.length = Math.round(message.length)); + if (message.amount) { + obj.amount = message.amount.map((e) => e ? Coin.toJSON(e) : undefined); + } else { + obj.amount = []; + } + return obj; + }, + + fromPartial, I>>(object: I): Period { + const message = createBasePeriod(); + message.length = object.length ?? 
0; + message.amount = object.amount?.map((e) => Coin.fromPartial(e)) || []; + return message; + }, +}; + +function createBasePeriodicVestingAccount(): PeriodicVestingAccount { + return { baseVestingAccount: undefined, startTime: 0, vestingPeriods: [] }; +} + +export const PeriodicVestingAccount = { + encode(message: PeriodicVestingAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.baseVestingAccount !== undefined) { + BaseVestingAccount.encode(message.baseVestingAccount, writer.uint32(10).fork()).ldelim(); + } + if (message.startTime !== 0) { + writer.uint32(16).int64(message.startTime); + } + for (const v of message.vestingPeriods) { + Period.encode(v!, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PeriodicVestingAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePeriodicVestingAccount(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.baseVestingAccount = BaseVestingAccount.decode(reader, reader.uint32()); + break; + case 2: + message.startTime = longToNumber(reader.int64() as Long); + break; + case 3: + message.vestingPeriods.push(Period.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PeriodicVestingAccount { + return { + baseVestingAccount: isSet(object.baseVestingAccount) + ? BaseVestingAccount.fromJSON(object.baseVestingAccount) + : undefined, + startTime: isSet(object.startTime) ? Number(object.startTime) : 0, + vestingPeriods: Array.isArray(object?.vestingPeriods) + ? 
object.vestingPeriods.map((e: any) => Period.fromJSON(e)) + : [], + }; + }, + + toJSON(message: PeriodicVestingAccount): unknown { + const obj: any = {}; + message.baseVestingAccount !== undefined && (obj.baseVestingAccount = message.baseVestingAccount + ? BaseVestingAccount.toJSON(message.baseVestingAccount) + : undefined); + message.startTime !== undefined && (obj.startTime = Math.round(message.startTime)); + if (message.vestingPeriods) { + obj.vestingPeriods = message.vestingPeriods.map((e) => e ? Period.toJSON(e) : undefined); + } else { + obj.vestingPeriods = []; + } + return obj; + }, + + fromPartial, I>>(object: I): PeriodicVestingAccount { + const message = createBasePeriodicVestingAccount(); + message.baseVestingAccount = (object.baseVestingAccount !== undefined && object.baseVestingAccount !== null) + ? BaseVestingAccount.fromPartial(object.baseVestingAccount) + : undefined; + message.startTime = object.startTime ?? 0; + message.vestingPeriods = object.vestingPeriods?.map((e) => Period.fromPartial(e)) || []; + return message; + }, +}; + +function createBasePermanentLockedAccount(): PermanentLockedAccount { + return { baseVestingAccount: undefined }; +} + +export const PermanentLockedAccount = { + encode(message: PermanentLockedAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.baseVestingAccount !== undefined) { + BaseVestingAccount.encode(message.baseVestingAccount, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PermanentLockedAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePermanentLockedAccount(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.baseVestingAccount = BaseVestingAccount.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PermanentLockedAccount { + return { + baseVestingAccount: isSet(object.baseVestingAccount) + ? BaseVestingAccount.fromJSON(object.baseVestingAccount) + : undefined, + }; + }, + + toJSON(message: PermanentLockedAccount): unknown { + const obj: any = {}; + message.baseVestingAccount !== undefined && (obj.baseVestingAccount = message.baseVestingAccount + ? BaseVestingAccount.toJSON(message.baseVestingAccount) + : undefined); + return obj; + }, + + fromPartial, I>>(object: I): PermanentLockedAccount { + const message = createBasePermanentLockedAccount(); + message.baseVestingAccount = (object.baseVestingAccount !== undefined && object.baseVestingAccount !== null) + ? BaseVestingAccount.fromPartial(object.baseVestingAccount) + : undefined; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.vesting.v1beta1/types/cosmos_proto/cosmos.ts b/ts-client/cosmos.vesting.v1beta1/types/cosmos_proto/cosmos.ts new file mode 100644 index 000000000..79c8d3486 --- /dev/null +++ b/ts-client/cosmos.vesting.v1beta1/types/cosmos_proto/cosmos.ts @@ -0,0 +1,247 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos_proto"; + +export enum ScalarType { + SCALAR_TYPE_UNSPECIFIED = 0, + SCALAR_TYPE_STRING = 1, + SCALAR_TYPE_BYTES = 2, + UNRECOGNIZED = -1, +} + +export function scalarTypeFromJSON(object: any): ScalarType { + switch (object) { + case 0: + case "SCALAR_TYPE_UNSPECIFIED": + return ScalarType.SCALAR_TYPE_UNSPECIFIED; + case 1: + case "SCALAR_TYPE_STRING": + return ScalarType.SCALAR_TYPE_STRING; + case 2: + case "SCALAR_TYPE_BYTES": + return ScalarType.SCALAR_TYPE_BYTES; + case -1: + case "UNRECOGNIZED": + default: + return ScalarType.UNRECOGNIZED; + } +} + +export function scalarTypeToJSON(object: ScalarType): string { + switch (object) { + case ScalarType.SCALAR_TYPE_UNSPECIFIED: + return "SCALAR_TYPE_UNSPECIFIED"; + case ScalarType.SCALAR_TYPE_STRING: + return "SCALAR_TYPE_STRING"; + case ScalarType.SCALAR_TYPE_BYTES: + return "SCALAR_TYPE_BYTES"; + case ScalarType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * InterfaceDescriptor describes an interface type to be used with + * accepts_interface and implements_interface and declared by declare_interface. 
+ */ +export interface InterfaceDescriptor { + /** + * name is the name of the interface. It should be a short-name (without + * a period) such that the fully qualified name of the interface will be + * package.name, ex. for the package a.b and interface named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the interface and its + * purpose. + */ + description: string; +} + +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. + * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. + * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ +export interface ScalarDescriptor { + /** + * name is the name of the scalar. It should be a short-name (without + * a period) such that the fully qualified name of the scalar will be + * package.name, ex. for the package a.b and scalar named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the scalar and its + * encoding format. For instance a big integer or decimal scalar should + * specify precisely the expected encoding format. + */ + description: string; + /** + * field_type is the type of field with which this scalar can be used. + * Scalars can be used with one and only one type of field so that + * encoding standards and simple and clear. Currently only string and + * bytes fields are supported for scalars. 
+ */ + fieldType: ScalarType[]; +} + +function createBaseInterfaceDescriptor(): InterfaceDescriptor { + return { name: "", description: "" }; +} + +export const InterfaceDescriptor = { + encode(message: InterfaceDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseInterfaceDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): InterfaceDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + }; + }, + + toJSON(message: InterfaceDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + return obj; + }, + + fromPartial, I>>(object: I): InterfaceDescriptor { + const message = createBaseInterfaceDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? 
""; + return message; + }, +}; + +function createBaseScalarDescriptor(): ScalarDescriptor { + return { name: "", description: "", fieldType: [] }; +} + +export const ScalarDescriptor = { + encode(message: ScalarDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + writer.uint32(26).fork(); + for (const v of message.fieldType) { + writer.int32(v); + } + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ScalarDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseScalarDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.fieldType.push(reader.int32() as any); + } + } else { + message.fieldType.push(reader.int32() as any); + } + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ScalarDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + fieldType: Array.isArray(object?.fieldType) ? 
object.fieldType.map((e: any) => scalarTypeFromJSON(e)) : [], + }; + }, + + toJSON(message: ScalarDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + if (message.fieldType) { + obj.fieldType = message.fieldType.map((e) => scalarTypeToJSON(e)); + } else { + obj.fieldType = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ScalarDescriptor { + const message = createBaseScalarDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? ""; + message.fieldType = object.fieldType?.map((e) => e) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.vesting.v1beta1/types/gogoproto/gogo.ts b/ts-client/cosmos.vesting.v1beta1/types/gogoproto/gogo.ts new file mode 100644 index 000000000..3f41a047a --- /dev/null +++ b/ts-client/cosmos.vesting.v1beta1/types/gogoproto/gogo.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "gogoproto"; diff --git a/ts-client/cosmos.vesting.v1beta1/types/google/protobuf/any.ts b/ts-client/cosmos.vesting.v1beta1/types/google/protobuf/any.ts new file mode 100644 index 000000000..c4ebf38be --- /dev/null +++ b/ts-client/cosmos.vesting.v1beta1/types/google/protobuf/any.ts @@ -0,0 +1,240 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * `Any` contains an arbitrary serialized protocol buffer message along with a + * URL that describes the type of the serialized message. + * + * Protobuf library provides support to pack/unpack Any values in the form + * of utility functions or additional generated methods of the Any type. + * + * Example 1: Pack and unpack a message in C++. + * + * Foo foo = ...; + * Any any; + * any.PackFrom(foo); + * ... + * if (any.UnpackTo(&foo)) { + * ... + * } + * + * Example 2: Pack and unpack a message in Java. + * + * Foo foo = ...; + * Any any = Any.pack(foo); + * ... + * if (any.is(Foo.class)) { + * foo = any.unpack(Foo.class); + * } + * + * Example 3: Pack and unpack a message in Python. + * + * foo = Foo(...) + * any = Any() + * any.Pack(foo) + * ... + * if any.Is(Foo.DESCRIPTOR): + * any.Unpack(foo) + * ... + * + * Example 4: Pack and unpack a message in Go + * + * foo := &pb.Foo{...} + * any, err := anypb.New(foo) + * if err != nil { + * ... + * } + * ... + * foo := &pb.Foo{} + * if err := any.UnmarshalTo(foo); err != nil { + * ... 
+ * } + * + * The pack methods provided by protobuf library will by default use + * 'type.googleapis.com/full.type.name' as the type URL and the unpack + * methods only use the fully qualified type name after the last '/' + * in the type URL, for example "foo.bar.com/x/y.z" will yield type + * name "y.z". + * + * JSON + * ==== + * The JSON representation of an `Any` value uses the regular + * representation of the deserialized, embedded message, with an + * additional field `@type` which contains the type URL. Example: + * + * package google.profile; + * message Person { + * string first_name = 1; + * string last_name = 2; + * } + * + * { + * "@type": "type.googleapis.com/google.profile.Person", + * "firstName": , + * "lastName": + * } + * + * If the embedded message type is well-known and has a custom JSON + * representation, that representation will be embedded adding a field + * `value` which holds the custom JSON in addition to the `@type` + * field. Example (for message [google.protobuf.Duration][]): + * + * { + * "@type": "type.googleapis.com/google.protobuf.Duration", + * "value": "1.212s" + * } + */ +export interface Any { + /** + * A URL/resource name that uniquely identifies the type of the serialized + * protocol buffer message. This string must contain at least + * one "/" character. The last segment of the URL's path must represent + * the fully qualified name of the type (as in + * `path/google.protobuf.Duration`). The name should be in a canonical form + * (e.g., leading "." is not accepted). + * + * In practice, teams usually precompile into the binary all types that they + * expect it to use in the context of Any. However, for URLs which use the + * scheme `http`, `https`, or no scheme, one can optionally set up a type + * server that maps type URLs to message definitions as follows: + * + * * If no scheme is provided, `https` is assumed. 
+ * * An HTTP GET on the URL must yield a [google.protobuf.Type][] + * value in binary format, or produce an error. + * * Applications are allowed to cache lookup results based on the + * URL, or have them precompiled into a binary to avoid any + * lookup. Therefore, binary compatibility needs to be preserved + * on changes to types. (Use versioned type names to manage + * breaking changes.) + * + * Note: this functionality is not currently available in the official + * protobuf release, and it is not used for type URLs beginning with + * type.googleapis.com. + * + * Schemes other than `http`, `https` (or the empty scheme) might be + * used with implementation specific semantics. + */ + typeUrl: string; + /** Must be a valid serialized protocol buffer of the above specified type. */ + value: Uint8Array; +} + +function createBaseAny(): Any { + return { typeUrl: "", value: new Uint8Array() }; +} + +export const Any = { + encode(message: Any, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.typeUrl !== "") { + writer.uint32(10).string(message.typeUrl); + } + if (message.value.length !== 0) { + writer.uint32(18).bytes(message.value); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Any { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAny(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.typeUrl = reader.string(); + break; + case 2: + message.value = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Any { + return { + typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : "", + value: isSet(object.value) ? 
bytesFromBase64(object.value) : new Uint8Array(), + }; + }, + + toJSON(message: Any): unknown { + const obj: any = {}; + message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl); + message.value !== undefined + && (obj.value = base64FromBytes(message.value !== undefined ? message.value : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): Any { + const message = createBaseAny(); + message.typeUrl = object.typeUrl ?? ""; + message.value = object.value ?? new Uint8Array(); + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/cosmos.vesting.v1beta1/types/google/protobuf/descriptor.ts b/ts-client/cosmos.vesting.v1beta1/types/google/protobuf/descriptor.ts new file mode 100644 index 000000000..8fb7bb24c --- /dev/null +++ b/ts-client/cosmos.vesting.v1beta1/types/google/protobuf/descriptor.ts @@ -0,0 +1,3753 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSet { + file: FileDescriptorProto[]; +} + +/** Describes a complete .proto file. */ +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name: string; + /** e.g. "foo", "foo.bar", etc. */ + package: string; + /** Names of files imported by this file. */ + dependency: string[]; + /** Indexes of the public imported files in the dependency list above. */ + publicDependency: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weakDependency: number[]; + /** All top-level definitions in this file. */ + messageType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + service: ServiceDescriptorProto[]; + extension: FieldDescriptorProto[]; + options: + | FileOptions + | undefined; + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. + */ + sourceCodeInfo: + | SourceCodeInfo + | undefined; + /** + * The syntax of the proto file. + * The supported values are "proto2" and "proto3". 
+ */ + syntax: string; +} + +/** Describes a message type. */ +export interface DescriptorProto { + name: string; + field: FieldDescriptorProto[]; + extension: FieldDescriptorProto[]; + nestedType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + extensionRange: DescriptorProto_ExtensionRange[]; + oneofDecl: OneofDescriptorProto[]; + options: MessageOptions | undefined; + reservedRange: DescriptorProto_ReservedRange[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reservedName: string[]; +} + +export interface DescriptorProto_ExtensionRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; + options: ExtensionRangeOptions | undefined; +} + +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; +} + +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Describes a field within a message. */ +export interface FieldDescriptorProto { + name: string; + number: number; + label: FieldDescriptorProto_Label; + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type: FieldDescriptorProto_Type; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). + */ + typeName: string; + /** + * For extensions, this is the name of the type being extended. 
It is + * resolved in the same manner as type_name. + */ + extendee: string; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + * TODO(kenton): Base-64 encode? + */ + defaultValue: string; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneofIndex: number; + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + jsonName: string; + options: + | FieldOptions + | undefined; + /** + * If true, this is a proto3 "optional". When a proto3 field is optional, it + * tracks presence regardless of field type. + * + * When proto3_optional is true, this field must be belong to a oneof to + * signal to old proto3 clients that presence is tracked for this field. This + * oneof is known as a "synthetic" oneof, and this field must be its sole + * member (each proto3 optional field gets its own synthetic oneof). Synthetic + * oneofs exist in the descriptor only, and do not generate any API. Synthetic + * oneofs must be ordered after all "real" oneofs. + * + * For message fields, proto3_optional doesn't create any semantic change, + * since non-repeated message fields always track presence. However it still + * indicates the semantic detail of whether the user wrote "optional" or not. + * This can be useful for round-tripping the .proto file. For consistency we + * give message fields a synthetic oneof also, even though it is not required + * to track presence. 
This is especially important because the parser can't + * tell if a field is a message or an enum, so it must always create a + * synthetic oneof. + * + * Proto2 optional fields do not set this flag, because they already indicate + * optional with `LABEL_OPTIONAL`. + */ + proto3Optional: boolean; +} + +export enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + TYPE_GROUP = 10, + /** TYPE_MESSAGE - Length-delimited aggregate. */ + TYPE_MESSAGE = 11, + /** TYPE_BYTES - New in version 2. */ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + /** TYPE_SINT64 - Uses ZigZag encoding. 
*/ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + return FieldDescriptorProto_Type.TYPE_SINT64; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Type.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case 
FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + case FieldDescriptorProto_Type.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Label.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string { + switch (object) { + case 
FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + case FieldDescriptorProto_Label.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** Describes a oneof. */ +export interface OneofDescriptorProto { + name: string; + options: OneofOptions | undefined; +} + +/** Describes an enum type. */ +export interface EnumDescriptorProto { + name: string; + value: EnumValueDescriptorProto[]; + options: + | EnumOptions + | undefined; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reservedRange: EnumDescriptorProto_EnumReservedRange[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reservedName: string[]; +} + +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start: number; + /** Inclusive. */ + end: number; +} + +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProto { + name: string; + number: number; + options: EnumValueOptions | undefined; +} + +/** Describes a service. */ +export interface ServiceDescriptorProto { + name: string; + method: MethodDescriptorProto[]; + options: ServiceOptions | undefined; +} + +/** Describes a method of a service. */ +export interface MethodDescriptorProto { + name: string; + /** + * Input and output type names. 
These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + inputType: string; + outputType: string; + options: + | MethodOptions + | undefined; + /** Identifies if client streams multiple client messages */ + clientStreaming: boolean; + /** Identifies if server streams multiple server messages */ + serverStreaming: boolean; +} + +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage: string; + /** + * Controls the name of the wrapper Java class generated for the .proto file. + * That class will always contain the .proto file's getDescriptor() method as + * well as any top-level extensions defined in the .proto file. + * If java_multiple_files is disabled, then all the other classes from the + * .proto file will be nested inside the single wrapper outer class. + */ + javaOuterClassname: string; + /** + * If enabled, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the wrapper class + * named by java_outer_classname. However, the wrapper class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + javaMultipleFiles: boolean; + /** + * This option does nothing. + * + * @deprecated + */ + javaGenerateEqualsAndHash: boolean; + /** + * If set true, then the Java2 code generator will generate code that + * throws an exception whenever an attempt is made to assign a non-UTF-8 + * byte sequence to a string field. + * Message reflection will do the same. + * However, an extension field still accepts non-UTF-8 byte sequences. 
+ * This option has no effect on when used with the lite runtime. + */ + javaStringCheckUtf8: boolean; + optimizeFor: FileOptions_OptimizeMode; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + goPackage: string; + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + ccGenericServices: boolean; + javaGenericServices: boolean; + pyGenericServices: boolean; + phpGenericServices: boolean; + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated: boolean; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + ccEnableArenas: boolean; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. + */ + objcClassPrefix: string; + /** Namespace for generated classes; defaults to the package. 
*/ + csharpNamespace: string; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swiftPrefix: string; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + phpClassPrefix: string; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. When this option is empty, the package name will be used for + * determining the namespace. + */ + phpNamespace: string; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + phpMetadataNamespace: string; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + rubyPackage: string; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Generated classes can be optimized for speed or code size. */ +export enum FileOptions_OptimizeMode { + /** SPEED - Generate complete code for parsing, serialization, */ + SPEED = 1, + /** CODE_SIZE - etc. */ + CODE_SIZE = 2, + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. 
*/ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1, +} + +export function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + case -1: + case "UNRECOGNIZED": + default: + return FileOptions_OptimizeMode.UNRECOGNIZED; + } +} + +export function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + case FileOptions_OptimizeMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + messageSetWireFormat: boolean; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". 
+ */ + noStandardDescriptorAccessor: boolean; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated: boolean; + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + mapEntry: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface FieldOptions { + /** + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is not yet implemented in the open source + * release -- sorry, we'll try to include it in a future version! + */ + ctype: FieldOptions_CType; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. 
+ */ + packed: boolean; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. + * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype: FieldOptions_JSType; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * Note that implementations may choose not to check required fields within + * a lazy sub-message. That is, calling IsInitialized() on the outer message + * may return true even if the inner message has missing required fields. 
+ * This is necessary because otherwise the inner message would have to be + * parsed in order to perform the check, defeating the purpose of lazy + * parsing. An implementation which chooses not to check required fields + * must be consistent about it. That is, for any particular sub-message, the + * implementation must either *always* check its required fields, or *never* + * check its required fields, regardless of whether or not the message has + * been parsed. + */ + lazy: boolean; + /** + * Is this field deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated: boolean; + /** For Google-internal migration only. Do not use. */ + weak: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export enum FieldOptions_CType { + /** STRING - Default mode. */ + STRING = 0, + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_CType.UNRECOGNIZED; + } +} + +export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + case FieldOptions_CType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. */ + JS_NORMAL = 0, + /** JS_STRING - Use JavaScript strings. 
*/ + JS_STRING = 1, + /** JS_NUMBER - Use JavaScript numbers. */ + JS_NUMBER = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_JSType.UNRECOGNIZED; + } +} + +export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + case FieldOptions_JSType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface OneofOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias: boolean; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumValueOptions { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. 
*/ + uninterpretedOption: UninterpretedOption[]; +} + +export interface ServiceOptions { + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated: boolean; + idempotencyLevel: MethodOptions_IdempotencyLevel; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
+ */ +export enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1, +} + +export function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + case -1: + case "UNRECOGNIZED": + default: + return MethodOptions_IdempotencyLevel.UNRECOGNIZED; + } +} + +export function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + case MethodOptions_IdempotencyLevel.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. 
+ */ + identifierValue: string; + positiveIntValue: number; + negativeIntValue: number; + doubleValue: number; + stringValue: Uint8Array; + aggregateValue: string; +} + +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePart { + namePart: string; + isExtension: boolean; +} + +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. 
This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[]; +} + +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition. For + * example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). 
+ */ + path: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. + * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to qux. + * // + * // Another line attached to qux. + * optional double qux = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to qux or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. * / + * /* Block comment attached to + * * grault. 
* / + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leadingComments: string; + trailingComments: string; + leadingDetachedComments: string[]; +} + +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. + */ + annotation: GeneratedCodeInfo_Annotation[]; +} + +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[]; + /** Identifies the filesystem path to the original source .proto. */ + sourceFile: string; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin: number; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified offset. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end: number; +} + +function createBaseFileDescriptorSet(): FileDescriptorSet { + return { file: [] }; +} + +export const FileDescriptorSet = { + encode(message: FileDescriptorSet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.file) { + FileDescriptorProto.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorSet { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.file.push(FileDescriptorProto.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorSet { + return { file: Array.isArray(object?.file) ? object.file.map((e: any) => FileDescriptorProto.fromJSON(e)) : [] }; + }, + + toJSON(message: FileDescriptorSet): unknown { + const obj: any = {}; + if (message.file) { + obj.file = message.file.map((e) => e ? FileDescriptorProto.toJSON(e) : undefined); + } else { + obj.file = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorSet { + const message = createBaseFileDescriptorSet(); + message.file = object.file?.map((e) => FileDescriptorProto.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFileDescriptorProto(): FileDescriptorProto { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + }; +} + +export const FileDescriptorProto = { + encode(message: FileDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.package !== "") { + writer.uint32(18).string(message.package); + } + for (const v of message.dependency) { + writer.uint32(26).string(v!); + } + writer.uint32(82).fork(); + for (const v of message.publicDependency) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(90).fork(); + for (const v of message.weakDependency) { + writer.int32(v); + } + writer.ldelim(); + for (const v of message.messageType) { + DescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of 
message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const v of message.service) { + ServiceDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(58).fork()).ldelim(); + } + if (message.options !== undefined) { + FileOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.sourceCodeInfo !== undefined) { + SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(74).fork()).ldelim(); + } + if (message.syntax !== "") { + writer.uint32(98).string(message.syntax); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFileDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.package = reader.string(); + break; + case 3: + message.dependency.push(reader.string()); + break; + case 10: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.publicDependency.push(reader.int32()); + } + } else { + message.publicDependency.push(reader.int32()); + } + break; + case 11: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.weakDependency.push(reader.int32()); + } + } else { + message.weakDependency.push(reader.int32()); + } + break; + case 4: + message.messageType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.service.push(ServiceDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + 
message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 8: + message.options = FileOptions.decode(reader, reader.uint32()); + break; + case 9: + message.sourceCodeInfo = SourceCodeInfo.decode(reader, reader.uint32()); + break; + case 12: + message.syntax = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + package: isSet(object.package) ? String(object.package) : "", + dependency: Array.isArray(object?.dependency) ? object.dependency.map((e: any) => String(e)) : [], + publicDependency: Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e: any) => Number(e)) + : [], + weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e: any) => Number(e)) : [], + messageType: Array.isArray(object?.messageType) + ? object.messageType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + service: Array.isArray(object?.service) ? object.service.map((e: any) => ServiceDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) ? SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, + syntax: isSet(object.syntax) ? 
String(object.syntax) : "", + }; + }, + + toJSON(message: FileDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.package !== undefined && (obj.package = message.package); + if (message.dependency) { + obj.dependency = message.dependency.map((e) => e); + } else { + obj.dependency = []; + } + if (message.publicDependency) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } else { + obj.publicDependency = []; + } + if (message.weakDependency) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } else { + obj.weakDependency = []; + } + if (message.messageType) { + obj.messageType = message.messageType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.messageType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.service) { + obj.service = message.service.map((e) => e ? ServiceDescriptorProto.toJSON(e) : undefined); + } else { + obj.service = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + message.options !== undefined && (obj.options = message.options ? FileOptions.toJSON(message.options) : undefined); + message.sourceCodeInfo !== undefined + && (obj.sourceCodeInfo = message.sourceCodeInfo ? SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined); + message.syntax !== undefined && (obj.syntax = message.syntax); + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorProto { + const message = createBaseFileDescriptorProto(); + message.name = object.name ?? ""; + message.package = object.package ?? 
""; + message.dependency = object.dependency?.map((e) => e) || []; + message.publicDependency = object.publicDependency?.map((e) => e) || []; + message.weakDependency = object.weakDependency?.map((e) => e) || []; + message.messageType = object.messageType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.service = object.service?.map((e) => ServiceDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? FileOptions.fromPartial(object.options) + : undefined; + message.sourceCodeInfo = (object.sourceCodeInfo !== undefined && object.sourceCodeInfo !== null) + ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) + : undefined; + message.syntax = object.syntax ?? ""; + return message; + }, +}; + +function createBaseDescriptorProto(): DescriptorProto { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + }; +} + +export const DescriptorProto = { + encode(message: DescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.field) { + FieldDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.nestedType) { + DescriptorProto.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.extensionRange) { + DescriptorProto_ExtensionRange.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const 
v of message.oneofDecl) { + OneofDescriptorProto.encode(v!, writer.uint32(66).fork()).ldelim(); + } + if (message.options !== undefined) { + MessageOptions.encode(message.options, writer.uint32(58).fork()).ldelim(); + } + for (const v of message.reservedRange) { + DescriptorProto_ReservedRange.encode(v!, writer.uint32(74).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(82).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.field.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.nestedType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 4: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.extensionRange.push(DescriptorProto_ExtensionRange.decode(reader, reader.uint32())); + break; + case 8: + message.oneofDecl.push(OneofDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + message.options = MessageOptions.decode(reader, reader.uint32()); + break; + case 9: + message.reservedRange.push(DescriptorProto_ReservedRange.decode(reader, reader.uint32())); + break; + case 10: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + field: Array.isArray(object?.field) ? 
object.field.map((e: any) => FieldDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: Array.isArray(object?.nestedType) + ? object.nestedType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + extensionRange: Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e: any) => DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e: any) => OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? MessageOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: DescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.field) { + obj.field = message.field.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.field = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + if (message.nestedType) { + obj.nestedType = message.nestedType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.nestedType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.extensionRange) { + obj.extensionRange = message.extensionRange.map((e) => e ? 
DescriptorProto_ExtensionRange.toJSON(e) : undefined); + } else { + obj.extensionRange = []; + } + if (message.oneofDecl) { + obj.oneofDecl = message.oneofDecl.map((e) => e ? OneofDescriptorProto.toJSON(e) : undefined); + } else { + obj.oneofDecl = []; + } + message.options !== undefined + && (obj.options = message.options ? MessageOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => e ? DescriptorProto_ReservedRange.toJSON(e) : undefined); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): DescriptorProto { + const message = createBaseDescriptorProto(); + message.name = object.name ?? ""; + message.field = object.field?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.nestedType = object.nestedType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.extensionRange = object.extensionRange?.map((e) => DescriptorProto_ExtensionRange.fromPartial(e)) || []; + message.oneofDecl = object.oneofDecl?.map((e) => OneofDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
MessageOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => DescriptorProto_ReservedRange.fromPartial(e)) || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseDescriptorProto_ExtensionRange(): DescriptorProto_ExtensionRange { + return { start: 0, end: 0, options: undefined }; +} + +export const DescriptorProto_ExtensionRange = { + encode(message: DescriptorProto_ExtensionRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + if (message.options !== undefined) { + ExtensionRangeOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ExtensionRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ExtensionRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + case 3: + message.options = ExtensionRangeOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ExtensionRange { + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + options: isSet(object.options) ? 
ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: DescriptorProto_ExtensionRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + message.options !== undefined + && (obj.options = message.options ? ExtensionRangeOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ExtensionRange { + const message = createBaseDescriptorProto_ExtensionRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? ExtensionRangeOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseDescriptorProto_ReservedRange(): DescriptorProto_ReservedRange { + return { start: 0, end: 0 }; +} + +export const DescriptorProto_ReservedRange = { + encode(message: DescriptorProto_ReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? 
Number(object.end) : 0 }; + }, + + toJSON(message: DescriptorProto_ReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ReservedRange { + const message = createBaseDescriptorProto_ReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + return message; + }, +}; + +function createBaseExtensionRangeOptions(): ExtensionRangeOptions { + return { uninterpretedOption: [] }; +} + +export const ExtensionRangeOptions = { + encode(message: ExtensionRangeOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ExtensionRangeOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ExtensionRangeOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} + +export const FieldDescriptorProto = { + encode(message: FieldDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(24).int32(message.number); + } + if (message.label !== 1) { + writer.uint32(32).int32(message.label); + } + if (message.type !== 1) { + writer.uint32(40).int32(message.type); + } + if (message.typeName !== "") { + writer.uint32(50).string(message.typeName); + } + if (message.extendee !== "") { + writer.uint32(18).string(message.extendee); + } + if (message.defaultValue !== "") { + writer.uint32(58).string(message.defaultValue); + } + if (message.oneofIndex !== 0) { + writer.uint32(72).int32(message.oneofIndex); + } + if (message.jsonName !== "") { + writer.uint32(82).string(message.jsonName); + } + if (message.options !== undefined) { + FieldOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.proto3Optional === true) { + writer.uint32(136).bool(message.proto3Optional); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 3: + message.number = reader.int32(); + break; + case 4: + message.label = reader.int32() as any; + break; + case 5: + message.type = reader.int32() as any; + break; + case 6: + message.typeName = reader.string(); + break; + case 2: + message.extendee = reader.string(); + break; + case 7: + message.defaultValue = reader.string(); + break; + case 9: + message.oneofIndex = reader.int32(); + break; + case 10: + message.jsonName = reader.string(); + break; + case 8: + message.options = FieldOptions.decode(reader, reader.uint32()); + break; + case 17: + message.proto3Optional = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) ? String(object.typeName) : "", + extendee: isSet(object.extendee) ? String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "", + oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? String(object.jsonName) : "", + options: isSet(object.options) ? FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) ? 
Boolean(object.proto3Optional) : false, + }; + }, + + toJSON(message: FieldDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label)); + message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type)); + message.typeName !== undefined && (obj.typeName = message.typeName); + message.extendee !== undefined && (obj.extendee = message.extendee); + message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue); + message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex)); + message.jsonName !== undefined && (obj.jsonName = message.jsonName); + message.options !== undefined && (obj.options = message.options ? FieldOptions.toJSON(message.options) : undefined); + message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional); + return obj; + }, + + fromPartial, I>>(object: I): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.label = object.label ?? 1; + message.type = object.type ?? 1; + message.typeName = object.typeName ?? ""; + message.extendee = object.extendee ?? ""; + message.defaultValue = object.defaultValue ?? ""; + message.oneofIndex = object.oneofIndex ?? 0; + message.jsonName = object.jsonName ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? FieldOptions.fromPartial(object.options) + : undefined; + message.proto3Optional = object.proto3Optional ?? 
false; + return message; + }, +}; + +function createBaseOneofDescriptorProto(): OneofDescriptorProto { + return { name: "", options: undefined }; +} + +export const OneofDescriptorProto = { + encode(message: OneofDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.options !== undefined) { + OneofOptions.encode(message.options, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.options = OneofOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + options: isSet(object.options) ? OneofOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: OneofDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.options !== undefined && (obj.options = message.options ? OneofOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto(); + message.name = object.name ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? 
OneofOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseEnumDescriptorProto(): EnumDescriptorProto { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] }; +} + +export const EnumDescriptorProto = { + encode(message: EnumDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.value) { + EnumValueDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + EnumOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.reservedRange) { + EnumDescriptorProto_EnumReservedRange.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(42).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.value.push(EnumValueDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = EnumOptions.decode(reader, reader.uint32()); + break; + case 4: + message.reservedRange.push(EnumDescriptorProto_EnumReservedRange.decode(reader, reader.uint32())); + break; + case 5: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + value: Array.isArray(object?.value) ? 
object.value.map((e: any) => EnumValueDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? EnumOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: EnumDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.value) { + obj.value = message.value.map((e) => e ? EnumValueDescriptorProto.toJSON(e) : undefined); + } else { + obj.value = []; + } + message.options !== undefined && (obj.options = message.options ? EnumOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => + e ? EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined + ); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto(); + message.name = object.name ?? ""; + message.value = object.value?.map((e) => EnumValueDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
EnumOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => EnumDescriptorProto_EnumReservedRange.fromPartial(e)) + || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseEnumDescriptorProto_EnumReservedRange(): EnumDescriptorProto_EnumReservedRange { + return { start: 0, end: 0 }; +} + +export const EnumDescriptorProto_EnumReservedRange = { + encode(message: EnumDescriptorProto_EnumReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto_EnumReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto_EnumReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + + toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +function createBaseEnumValueDescriptorProto(): EnumValueDescriptorProto { + return { name: "", number: 0, options: undefined }; +} + +export const EnumValueDescriptorProto = { + encode(message: EnumValueDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(16).int32(message.number); + } + if (message.options !== undefined) { + EnumValueOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.number = reader.int32(); + break; + case 3: + message.options = EnumValueOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + options: isSet(object.options) ? EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: EnumValueDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.options !== undefined + && (obj.options = message.options ? 
EnumValueOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? EnumValueOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseServiceDescriptorProto(): ServiceDescriptorProto { + return { name: "", method: [], options: undefined }; +} + +export const ServiceDescriptorProto = { + encode(message: ServiceDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.method) { + MethodDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + ServiceOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.method.push(MethodDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = ServiceOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + method: Array.isArray(object?.method) ? object.method.map((e: any) => MethodDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? 
ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: ServiceDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.method) { + obj.method = message.method.map((e) => e ? MethodDescriptorProto.toJSON(e) : undefined); + } else { + obj.method = []; + } + message.options !== undefined + && (obj.options = message.options ? ServiceOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto(); + message.name = object.name ?? ""; + message.method = object.method?.map((e) => MethodDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? ServiceOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseMethodDescriptorProto(): MethodDescriptorProto { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} + +export const MethodDescriptorProto = { + encode(message: MethodDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.inputType !== "") { + writer.uint32(18).string(message.inputType); + } + if (message.outputType !== "") { + writer.uint32(26).string(message.outputType); + } + if (message.options !== undefined) { + MethodOptions.encode(message.options, writer.uint32(34).fork()).ldelim(); + } + if (message.clientStreaming === true) { + writer.uint32(40).bool(message.clientStreaming); + } + if (message.serverStreaming === true) { + writer.uint32(48).bool(message.serverStreaming); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodDescriptorProto { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.inputType = reader.string(); + break; + case 3: + message.outputType = reader.string(); + break; + case 4: + message.options = MethodOptions.decode(reader, reader.uint32()); + break; + case 5: + message.clientStreaming = reader.bool(); + break; + case 6: + message.serverStreaming = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + inputType: isSet(object.inputType) ? String(object.inputType) : "", + outputType: isSet(object.outputType) ? String(object.outputType) : "", + options: isSet(object.options) ? MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false, + serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false, + }; + }, + + toJSON(message: MethodDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.inputType !== undefined && (obj.inputType = message.inputType); + message.outputType !== undefined && (obj.outputType = message.outputType); + message.options !== undefined + && (obj.options = message.options ? MethodOptions.toJSON(message.options) : undefined); + message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming); + message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming); + return obj; + }, + + fromPartial, I>>(object: I): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto(); + message.name = object.name ?? 
""; + message.inputType = object.inputType ?? ""; + message.outputType = object.outputType ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? MethodOptions.fromPartial(object.options) + : undefined; + message.clientStreaming = object.clientStreaming ?? false; + message.serverStreaming = object.serverStreaming ?? false; + return message; + }, +}; + +function createBaseFileOptions(): FileOptions { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + uninterpretedOption: [], + }; +} + +export const FileOptions = { + encode(message: FileOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.javaPackage !== "") { + writer.uint32(10).string(message.javaPackage); + } + if (message.javaOuterClassname !== "") { + writer.uint32(66).string(message.javaOuterClassname); + } + if (message.javaMultipleFiles === true) { + writer.uint32(80).bool(message.javaMultipleFiles); + } + if (message.javaGenerateEqualsAndHash === true) { + writer.uint32(160).bool(message.javaGenerateEqualsAndHash); + } + if (message.javaStringCheckUtf8 === true) { + writer.uint32(216).bool(message.javaStringCheckUtf8); + } + if (message.optimizeFor !== 1) { + writer.uint32(72).int32(message.optimizeFor); + } + if (message.goPackage !== "") { + writer.uint32(90).string(message.goPackage); + } + if (message.ccGenericServices === true) { + writer.uint32(128).bool(message.ccGenericServices); + } + if (message.javaGenericServices === true) { + writer.uint32(136).bool(message.javaGenericServices); + } + if 
(message.pyGenericServices === true) { + writer.uint32(144).bool(message.pyGenericServices); + } + if (message.phpGenericServices === true) { + writer.uint32(336).bool(message.phpGenericServices); + } + if (message.deprecated === true) { + writer.uint32(184).bool(message.deprecated); + } + if (message.ccEnableArenas === true) { + writer.uint32(248).bool(message.ccEnableArenas); + } + if (message.objcClassPrefix !== "") { + writer.uint32(290).string(message.objcClassPrefix); + } + if (message.csharpNamespace !== "") { + writer.uint32(298).string(message.csharpNamespace); + } + if (message.swiftPrefix !== "") { + writer.uint32(314).string(message.swiftPrefix); + } + if (message.phpClassPrefix !== "") { + writer.uint32(322).string(message.phpClassPrefix); + } + if (message.phpNamespace !== "") { + writer.uint32(330).string(message.phpNamespace); + } + if (message.phpMetadataNamespace !== "") { + writer.uint32(354).string(message.phpMetadataNamespace); + } + if (message.rubyPackage !== "") { + writer.uint32(362).string(message.rubyPackage); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.javaPackage = reader.string(); + break; + case 8: + message.javaOuterClassname = reader.string(); + break; + case 10: + message.javaMultipleFiles = reader.bool(); + break; + case 20: + message.javaGenerateEqualsAndHash = reader.bool(); + break; + case 27: + message.javaStringCheckUtf8 = reader.bool(); + break; + case 9: + message.optimizeFor = reader.int32() as any; + break; + case 11: + message.goPackage = reader.string(); + break; + case 16: + message.ccGenericServices = reader.bool(); + break; + case 17: + message.javaGenericServices = reader.bool(); + break; + case 18: + message.pyGenericServices = reader.bool(); + break; + case 42: + message.phpGenericServices = reader.bool(); + break; + case 23: + message.deprecated = reader.bool(); + break; + case 31: + message.ccEnableArenas = reader.bool(); + break; + case 36: + message.objcClassPrefix = reader.string(); + break; + case 37: + message.csharpNamespace = reader.string(); + break; + case 39: + message.swiftPrefix = reader.string(); + break; + case 40: + message.phpClassPrefix = reader.string(); + break; + case 41: + message.phpNamespace = reader.string(); + break; + case 44: + message.phpMetadataNamespace = reader.string(); + break; + case 45: + message.rubyPackage = reader.string(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileOptions { + return { + javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "", + javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) ? 
Boolean(object.javaMultipleFiles) : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false, + optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, + goPackage: isSet(object.goPackage) ? String(object.goPackage) : "", + ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false, + javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false, + pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false, + phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false, + objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "", + csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "", + swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "", + phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "", + phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "", + rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "", + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FileOptions): unknown { + const obj: any = {}; + message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage); + message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname); + message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles); + message.javaGenerateEqualsAndHash !== undefined + && (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash); + message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8); + message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor)); + message.goPackage !== undefined && (obj.goPackage = message.goPackage); + message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices); + message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices); + message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices); + message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas); + message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix); + message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace); + message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix); + message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix); + message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace); + message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace); + message.rubyPackage !== undefined 
&& (obj.rubyPackage = message.rubyPackage); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileOptions { + const message = createBaseFileOptions(); + message.javaPackage = object.javaPackage ?? ""; + message.javaOuterClassname = object.javaOuterClassname ?? ""; + message.javaMultipleFiles = object.javaMultipleFiles ?? false; + message.javaGenerateEqualsAndHash = object.javaGenerateEqualsAndHash ?? false; + message.javaStringCheckUtf8 = object.javaStringCheckUtf8 ?? false; + message.optimizeFor = object.optimizeFor ?? 1; + message.goPackage = object.goPackage ?? ""; + message.ccGenericServices = object.ccGenericServices ?? false; + message.javaGenericServices = object.javaGenericServices ?? false; + message.pyGenericServices = object.pyGenericServices ?? false; + message.phpGenericServices = object.phpGenericServices ?? false; + message.deprecated = object.deprecated ?? false; + message.ccEnableArenas = object.ccEnableArenas ?? false; + message.objcClassPrefix = object.objcClassPrefix ?? ""; + message.csharpNamespace = object.csharpNamespace ?? ""; + message.swiftPrefix = object.swiftPrefix ?? ""; + message.phpClassPrefix = object.phpClassPrefix ?? ""; + message.phpNamespace = object.phpNamespace ?? ""; + message.phpMetadataNamespace = object.phpMetadataNamespace ?? ""; + message.rubyPackage = object.rubyPackage ?? 
""; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMessageOptions(): MessageOptions { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + uninterpretedOption: [], + }; +} + +export const MessageOptions = { + encode(message: MessageOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.messageSetWireFormat === true) { + writer.uint32(8).bool(message.messageSetWireFormat); + } + if (message.noStandardDescriptorAccessor === true) { + writer.uint32(16).bool(message.noStandardDescriptorAccessor); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.mapEntry === true) { + writer.uint32(56).bool(message.mapEntry); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MessageOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMessageOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.messageSetWireFormat = reader.bool(); + break; + case 2: + message.noStandardDescriptorAccessor = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 7: + message.mapEntry = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MessageOptions { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) ? 
Boolean(object.messageSetWireFormat) : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MessageOptions): unknown { + const obj: any = {}; + message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat); + message.noStandardDescriptorAccessor !== undefined + && (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MessageOptions { + const message = createBaseMessageOptions(); + message.messageSetWireFormat = object.messageSetWireFormat ?? false; + message.noStandardDescriptorAccessor = object.noStandardDescriptorAccessor ?? false; + message.deprecated = object.deprecated ?? false; + message.mapEntry = object.mapEntry ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldOptions(): FieldOptions { + return { ctype: 0, packed: false, jstype: 0, lazy: false, deprecated: false, weak: false, uninterpretedOption: [] }; +} + +export const FieldOptions = { + encode(message: FieldOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ctype !== 0) { + writer.uint32(8).int32(message.ctype); + } + if (message.packed === true) { + writer.uint32(16).bool(message.packed); + } + if (message.jstype !== 0) { + writer.uint32(48).int32(message.jstype); + } + if (message.lazy === true) { + writer.uint32(40).bool(message.lazy); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.weak === true) { + writer.uint32(80).bool(message.weak); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.ctype = reader.int32() as any; + break; + case 2: + message.packed = reader.bool(); + break; + case 6: + message.jstype = reader.int32() as any; + break; + case 5: + message.lazy = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 10: + message.weak = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldOptions { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? Boolean(object.lazy) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? Boolean(object.weak) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FieldOptions): unknown { + const obj: any = {}; + message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype)); + message.packed !== undefined && (obj.packed = message.packed); + message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype)); + message.lazy !== undefined && (obj.lazy = message.lazy); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.weak !== undefined && (obj.weak = message.weak); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FieldOptions { + const message = createBaseFieldOptions(); + message.ctype = object.ctype ?? 0; + message.packed = object.packed ?? false; + message.jstype = object.jstype ?? 0; + message.lazy = object.lazy ?? false; + message.deprecated = object.deprecated ?? false; + message.weak = object.weak ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseOneofOptions(): OneofOptions { + return { uninterpretedOption: [] }; +} + +export const OneofOptions = { + encode(message: OneofOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: OneofOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): OneofOptions { + const message = createBaseOneofOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumOptions(): EnumOptions { + return { allowAlias: false, deprecated: false, uninterpretedOption: [] }; +} + +export const EnumOptions = { + encode(message: EnumOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.allowAlias === true) { + writer.uint32(16).bool(message.allowAlias); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.allowAlias = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumOptions { + return { + allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumOptions): unknown { + const obj: any = {}; + message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumOptions { + const message = createBaseEnumOptions(); + message.allowAlias = object.allowAlias ?? false; + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumValueOptions(): EnumValueOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const EnumValueOptions = { + encode(message: EnumValueOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(8).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumValueOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumValueOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumValueOptions { + const message = createBaseEnumValueOptions(); + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseServiceOptions(): ServiceOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const ServiceOptions = { + encode(message: ServiceOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceOptions { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ServiceOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ServiceOptions { + const message = createBaseServiceOptions(); + message.deprecated = object.deprecated ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMethodOptions(): MethodOptions { + return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] }; +} + +export const MethodOptions = { + encode(message: MethodOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + if (message.idempotencyLevel !== 0) { + writer.uint32(272).int32(message.idempotencyLevel); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 34: + message.idempotencyLevel = reader.int32() as any; + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : 0, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MethodOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.idempotencyLevel !== undefined + && (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel)); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MethodOptions { + const message = createBaseMethodOptions(); + message.deprecated = object.deprecated ?? false; + message.idempotencyLevel = object.idempotencyLevel ?? 0; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseUninterpretedOption(): UninterpretedOption { + return { + name: [], + identifierValue: "", + positiveIntValue: 0, + negativeIntValue: 0, + doubleValue: 0, + stringValue: new Uint8Array(), + aggregateValue: "", + }; +} + +export const UninterpretedOption = { + encode(message: UninterpretedOption, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.name) { + UninterpretedOption_NamePart.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.identifierValue !== "") { + writer.uint32(26).string(message.identifierValue); + } + if (message.positiveIntValue !== 0) { + writer.uint32(32).uint64(message.positiveIntValue); + } + if (message.negativeIntValue !== 0) { + writer.uint32(40).int64(message.negativeIntValue); + } + if (message.doubleValue !== 0) { + writer.uint32(49).double(message.doubleValue); + } + if (message.stringValue.length !== 0) { + writer.uint32(58).bytes(message.stringValue); + } + if (message.aggregateValue !== "") { + writer.uint32(66).string(message.aggregateValue); + } + 
return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.name.push(UninterpretedOption_NamePart.decode(reader, reader.uint32())); + break; + case 3: + message.identifierValue = reader.string(); + break; + case 4: + message.positiveIntValue = longToNumber(reader.uint64() as Long); + break; + case 5: + message.negativeIntValue = longToNumber(reader.int64() as Long); + break; + case 6: + message.doubleValue = reader.double(); + break; + case 7: + message.stringValue = reader.bytes(); + break; + case 8: + message.aggregateValue = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption { + return { + name: Array.isArray(object?.name) ? object.name.map((e: any) => UninterpretedOption_NamePart.fromJSON(e)) : [], + identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "", + positiveIntValue: isSet(object.positiveIntValue) ? Number(object.positiveIntValue) : 0, + negativeIntValue: isSet(object.negativeIntValue) ? Number(object.negativeIntValue) : 0, + doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) ? bytesFromBase64(object.stringValue) : new Uint8Array(), + aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "", + }; + }, + + toJSON(message: UninterpretedOption): unknown { + const obj: any = {}; + if (message.name) { + obj.name = message.name.map((e) => e ? 
UninterpretedOption_NamePart.toJSON(e) : undefined); + } else { + obj.name = []; + } + message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue); + message.positiveIntValue !== undefined && (obj.positiveIntValue = Math.round(message.positiveIntValue)); + message.negativeIntValue !== undefined && (obj.negativeIntValue = Math.round(message.negativeIntValue)); + message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue); + message.stringValue !== undefined + && (obj.stringValue = base64FromBytes( + message.stringValue !== undefined ? message.stringValue : new Uint8Array(), + )); + message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption { + const message = createBaseUninterpretedOption(); + message.name = object.name?.map((e) => UninterpretedOption_NamePart.fromPartial(e)) || []; + message.identifierValue = object.identifierValue ?? ""; + message.positiveIntValue = object.positiveIntValue ?? 0; + message.negativeIntValue = object.negativeIntValue ?? 0; + message.doubleValue = object.doubleValue ?? 0; + message.stringValue = object.stringValue ?? new Uint8Array(); + message.aggregateValue = object.aggregateValue ?? ""; + return message; + }, +}; + +function createBaseUninterpretedOption_NamePart(): UninterpretedOption_NamePart { + return { namePart: "", isExtension: false }; +} + +export const UninterpretedOption_NamePart = { + encode(message: UninterpretedOption_NamePart, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.namePart !== "") { + writer.uint32(10).string(message.namePart); + } + if (message.isExtension === true) { + writer.uint32(16).bool(message.isExtension); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption_NamePart { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseUninterpretedOption_NamePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.namePart = reader.string(); + break; + case 2: + message.isExtension = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption_NamePart { + return { + namePart: isSet(object.namePart) ? String(object.namePart) : "", + isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false, + }; + }, + + toJSON(message: UninterpretedOption_NamePart): unknown { + const obj: any = {}; + message.namePart !== undefined && (obj.namePart = message.namePart); + message.isExtension !== undefined && (obj.isExtension = message.isExtension); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart(); + message.namePart = object.namePart ?? ""; + message.isExtension = object.isExtension ?? false; + return message; + }, +}; + +function createBaseSourceCodeInfo(): SourceCodeInfo { + return { location: [] }; +} + +export const SourceCodeInfo = { + encode(message: SourceCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.location) { + SourceCodeInfo_Location.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.location.push(SourceCodeInfo_Location.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo { + return { + location: Array.isArray(object?.location) + ? object.location.map((e: any) => SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo): unknown { + const obj: any = {}; + if (message.location) { + obj.location = message.location.map((e) => e ? SourceCodeInfo_Location.toJSON(e) : undefined); + } else { + obj.location = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo { + const message = createBaseSourceCodeInfo(); + message.location = object.location?.map((e) => SourceCodeInfo_Location.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSourceCodeInfo_Location(): SourceCodeInfo_Location { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} + +export const SourceCodeInfo_Location = { + encode(message: SourceCodeInfo_Location, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(18).fork(); + for (const v of message.span) { + writer.int32(v); + } + writer.ldelim(); + if (message.leadingComments !== "") { + writer.uint32(26).string(message.leadingComments); + } + if (message.trailingComments !== "") { + writer.uint32(34).string(message.trailingComments); + } + for (const v of message.leadingDetachedComments) { + writer.uint32(50).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo_Location { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo_Location(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.span.push(reader.int32()); + } + } else { + message.span.push(reader.int32()); + } + break; + case 3: + message.leadingComments = reader.string(); + break; + case 4: + message.trailingComments = reader.string(); + break; + case 6: + message.leadingDetachedComments.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo_Location { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + span: Array.isArray(object?.span) ? object.span.map((e: any) => Number(e)) : [], + leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "", + trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "", + leadingDetachedComments: Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e: any) => String(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo_Location): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + if (message.span) { + obj.span = message.span.map((e) => Math.round(e)); + } else { + obj.span = []; + } + message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments); + message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments); + if (message.leadingDetachedComments) { + obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e); + } else { + obj.leadingDetachedComments = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location(); + message.path = object.path?.map((e) => e) || []; + message.span = object.span?.map((e) => e) || []; + message.leadingComments = object.leadingComments ?? ""; + message.trailingComments = object.trailingComments ?? ""; + message.leadingDetachedComments = object.leadingDetachedComments?.map((e) => e) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo(): GeneratedCodeInfo { + return { annotation: [] }; +} + +export const GeneratedCodeInfo = { + encode(message: GeneratedCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.annotation) { + GeneratedCodeInfo_Annotation.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.annotation.push(GeneratedCodeInfo_Annotation.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo { + return { + annotation: Array.isArray(object?.annotation) + ? object.annotation.map((e: any) => GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GeneratedCodeInfo): unknown { + const obj: any = {}; + if (message.annotation) { + obj.annotation = message.annotation.map((e) => e ? GeneratedCodeInfo_Annotation.toJSON(e) : undefined); + } else { + obj.annotation = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo(); + message.annotation = object.annotation?.map((e) => GeneratedCodeInfo_Annotation.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo_Annotation(): GeneratedCodeInfo_Annotation { + return { path: [], sourceFile: "", begin: 0, end: 0 }; +} + +export const GeneratedCodeInfo_Annotation = { + encode(message: GeneratedCodeInfo_Annotation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + if (message.sourceFile !== "") { + writer.uint32(18).string(message.sourceFile); + } + if (message.begin !== 0) { + writer.uint32(24).int32(message.begin); + } + if (message.end !== 0) { + writer.uint32(32).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo_Annotation { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo_Annotation(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + message.sourceFile = reader.string(); + break; + case 3: + message.begin = reader.int32(); + break; + case 4: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo_Annotation { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "", + begin: isSet(object.begin) ? Number(object.begin) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + }; + }, + + toJSON(message: GeneratedCodeInfo_Annotation): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile); + message.begin !== undefined && (obj.begin = Math.round(message.begin)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation(); + message.path = object.path?.map((e) => e) || []; + message.sourceFile = object.sourceFile ?? ""; + message.begin = object.begin ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/env.ts b/ts-client/env.ts new file mode 100755 index 000000000..92558e4be --- /dev/null +++ b/ts-client/env.ts @@ -0,0 +1,7 @@ +import { OfflineSigner } from "@cosmjs/proto-signing"; + +export interface Env { + apiURL: string + rpcURL: string + prefix?: string +} \ No newline at end of file diff --git a/ts-client/helpers.ts b/ts-client/helpers.ts new file mode 100755 index 000000000..d8ccd3d0b --- /dev/null +++ b/ts-client/helpers.ts @@ -0,0 +1,32 @@ +export type Constructor = new (...args: any[]) => T; + +export type AnyFunction = (...args: any) => any; + +export type UnionToIntersection = + (Union extends any + ? (argument: Union) => void + : never + ) extends (argument: infer Intersection) => void + ? Intersection + : never; + +export type Return = + T extends AnyFunction + ? ReturnType + : T extends AnyFunction[] + ? 
UnionToIntersection> + : never + + +export const MissingWalletError = new Error("wallet is required"); + +export function getStructure(template) { + let structure = { fields: [] as Array} + for (const [key, value] of Object.entries(template)) { + let field: any = {} + field.name = key + field.type = typeof value + structure.fields.push(field) + } + return structure +} \ No newline at end of file diff --git a/ts-client/index.ts b/ts-client/index.ts new file mode 100755 index 000000000..19f95351b --- /dev/null +++ b/ts-client/index.ts @@ -0,0 +1,49 @@ +// Generated by Ignite ignite.com/cli +import { Registry } from '@cosmjs/proto-signing' +import { IgniteClient } from "./client"; +import { MissingWalletError } from "./helpers"; +import { Module as CosmosAuthV1Beta1, msgTypes as CosmosAuthV1Beta1MsgTypes } from './cosmos.auth.v1beta1' +import { Module as CosmosBaseTendermintV1Beta1, msgTypes as CosmosBaseTendermintV1Beta1MsgTypes } from './cosmos.base.tendermint.v1beta1' +import { Module as CosmosConsensusV1, msgTypes as CosmosConsensusV1MsgTypes } from './cosmos.consensus.v1' +import { Module as CosmosParamsV1Beta1, msgTypes as CosmosParamsV1Beta1MsgTypes } from './cosmos.params.v1beta1' +import { Module as CosmosTxV1Beta1, msgTypes as CosmosTxV1Beta1MsgTypes } from './cosmos.tx.v1beta1' +import { Module as CosmosUpgradeV1Beta1, msgTypes as CosmosUpgradeV1Beta1MsgTypes } from './cosmos.upgrade.v1beta1' +import { Module as CosmosVestingV1Beta1, msgTypes as CosmosVestingV1Beta1MsgTypes } from './cosmos.vesting.v1beta1' +import { Module as ZigbeeallianceDistributedcomplianceledgerCompliance, msgTypes as ZigbeeallianceDistributedcomplianceledgerComplianceMsgTypes } from './zigbeealliance.distributedcomplianceledger.compliance' +import { Module as ZigbeeallianceDistributedcomplianceledgerDclauth, msgTypes as ZigbeeallianceDistributedcomplianceledgerDclauthMsgTypes } from './zigbeealliance.distributedcomplianceledger.dclauth' +import { Module as 
ZigbeeallianceDistributedcomplianceledgerDclgenutil, msgTypes as ZigbeeallianceDistributedcomplianceledgerDclgenutilMsgTypes } from './zigbeealliance.distributedcomplianceledger.dclgenutil' +import { Module as ZigbeeallianceDistributedcomplianceledgerDclupgrade, msgTypes as ZigbeeallianceDistributedcomplianceledgerDclupgradeMsgTypes } from './zigbeealliance.distributedcomplianceledger.dclupgrade' +import { Module as ZigbeeallianceDistributedcomplianceledgerModel, msgTypes as ZigbeeallianceDistributedcomplianceledgerModelMsgTypes } from './zigbeealliance.distributedcomplianceledger.model' +import { Module as ZigbeeallianceDistributedcomplianceledgerPki, msgTypes as ZigbeeallianceDistributedcomplianceledgerPkiMsgTypes } from './zigbeealliance.distributedcomplianceledger.pki' +import { Module as ZigbeeallianceDistributedcomplianceledgerValidator, msgTypes as ZigbeeallianceDistributedcomplianceledgerValidatorMsgTypes } from './zigbeealliance.distributedcomplianceledger.validator' +import { Module as ZigbeeallianceDistributedcomplianceledgerVendorinfo, msgTypes as ZigbeeallianceDistributedcomplianceledgerVendorinfoMsgTypes } from './zigbeealliance.distributedcomplianceledger.vendorinfo' + + +const Client = IgniteClient.plugin([ + CosmosAuthV1Beta1, CosmosBaseTendermintV1Beta1, CosmosConsensusV1, CosmosParamsV1Beta1, CosmosTxV1Beta1, CosmosUpgradeV1Beta1, CosmosVestingV1Beta1, ZigbeeallianceDistributedcomplianceledgerCompliance, ZigbeeallianceDistributedcomplianceledgerDclauth, ZigbeeallianceDistributedcomplianceledgerDclgenutil, ZigbeeallianceDistributedcomplianceledgerDclupgrade, ZigbeeallianceDistributedcomplianceledgerModel, ZigbeeallianceDistributedcomplianceledgerPki, ZigbeeallianceDistributedcomplianceledgerValidator, ZigbeeallianceDistributedcomplianceledgerVendorinfo +]); + +const registry = new Registry([ + ...CosmosAuthV1Beta1MsgTypes, + ...CosmosBaseTendermintV1Beta1MsgTypes, + ...CosmosConsensusV1MsgTypes, + ...CosmosParamsV1Beta1MsgTypes, + 
...CosmosTxV1Beta1MsgTypes, + ...CosmosUpgradeV1Beta1MsgTypes, + ...CosmosVestingV1Beta1MsgTypes, + ...ZigbeeallianceDistributedcomplianceledgerComplianceMsgTypes, + ...ZigbeeallianceDistributedcomplianceledgerDclauthMsgTypes, + ...ZigbeeallianceDistributedcomplianceledgerDclgenutilMsgTypes, + ...ZigbeeallianceDistributedcomplianceledgerDclupgradeMsgTypes, + ...ZigbeeallianceDistributedcomplianceledgerModelMsgTypes, + ...ZigbeeallianceDistributedcomplianceledgerPkiMsgTypes, + ...ZigbeeallianceDistributedcomplianceledgerValidatorMsgTypes, + ...ZigbeeallianceDistributedcomplianceledgerVendorinfoMsgTypes, + +]) + +export { + Client, + registry, + MissingWalletError +} diff --git a/ts-client/modules.ts b/ts-client/modules.ts new file mode 100755 index 000000000..634b83c12 --- /dev/null +++ b/ts-client/modules.ts @@ -0,0 +1,5 @@ +import { IgniteClient } from "./client"; +import { GeneratedType } from "@cosmjs/proto-signing"; + +export type ModuleInterface = { [key: string]: any } +export type Module = (instance: IgniteClient) => { module: ModuleInterface, registry: [string, GeneratedType][] } diff --git a/ts-client/package.json b/ts-client/package.json new file mode 100755 index 000000000..a3b48ec51 --- /dev/null +++ b/ts-client/package.json @@ -0,0 +1,35 @@ +{ + "name": "zigbee-alliance-distributed-compliance-ledger-client-ts", + "version": "0.0.1", + "description": "Autogenerated Typescript Client", + "author": "Ignite Codegen ", + "license": "Apache-2.0", + "licenses": [ + { + "type": "Apache-2.0", + "url": "http://www.apache.org/licenses/LICENSE-2.0" + } + ], + "main": "index.ts", + "publishConfig": { + "access": "public" + }, + "dependencies": { + "@cosmjs/launchpad": "0.27.0", + "@cosmjs/proto-signing": "0.27.0", + "@cosmjs/stargate": "0.27.0", + "@keplr-wallet/types": "^0.11.3", + "axios": "0.21.4", + "buffer": "^6.0.3", + "events": "^3.3.0" + }, + "peerDependencies": { + "@cosmjs/launchpad": "0.27.0", + "@cosmjs/proto-signing": "0.27.0", + "@cosmjs/stargate": 
"0.27.0" + }, + "devDependencies": { + "@types/events": "^3.0.0", + "typescript": "^4.8.4" + } +} diff --git a/ts-client/tsconfig.json b/ts-client/tsconfig.json new file mode 100755 index 000000000..5e4ca7118 --- /dev/null +++ b/ts-client/tsconfig.json @@ -0,0 +1,12 @@ +{ + "compilerOptions": { + "target": "ES2020", + "module": "ES2020", + "moduleResolution": "node", + "outDir": "./lib", + "allowSyntheticDefaultImports": true, + "esModuleInterop": false, + "strict": false, + "skipLibCheck": true + } + } \ No newline at end of file diff --git a/ts-client/types.d.ts b/ts-client/types.d.ts new file mode 100755 index 000000000..268332bcb --- /dev/null +++ b/ts-client/types.d.ts @@ -0,0 +1,21 @@ +import { Keplr, Window as KeplrWindow } from '@keplr-wallet/types'; + +declare global { + interface KeplrIntereactionOptions { + readonly sign?: KeplrSignOptions; + } + + export interface KeplrSignOptions { + readonly preferNoSetFee?: boolean; + readonly preferNoSetMemo?: boolean; + readonly disableBalanceCheck?: boolean; + } + interface CustomKeplr extends Keplr { + enable(chainId: string | string[]): Promise; + + defaultOptions: KeplrIntereactionOptions; + } + interface Window extends KeplrWindow { + keplr: CustomKeplr; + } +} \ No newline at end of file diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.compliance/api.swagger.yml b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/api.swagger.yml new file mode 100644 index 000000000..3b939d603 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/api.swagger.yml @@ -0,0 +1,1687 @@ +swagger: '2.0' +info: + title: HTTP API Console zigbeealliance.distributedcomplianceledger.compliance + name: '' + description: '' +paths: + /dcl/compliance/certified-models: + get: + operationId: CertifiedModelAll + responses: + '200': + description: A successful response. 
+ schema: + type: object + properties: + certifiedModel: + type: array + items: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + value: + type: boolean + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/compliance/certified-models/{vid}/{pid}/{softwareVersion}/{certificationType}: + get: + operationId: CertifiedModel + responses: + '200': + description: A successful response. + schema: + type: object + properties: + certifiedModel: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + value: + type: boolean + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: vid + in: path + required: true + type: integer + format: int32 + - name: pid + in: path + required: true + type: integer + format: int32 + - name: softwareVersion + in: path + required: true + type: integer + format: int64 + - name: certificationType + in: path + required: true + type: string + tags: + - Query + /dcl/compliance/compliance-info: + get: + operationId: ComplianceInfoAll + responses: + '200': + description: A successful response. + schema: + type: object + properties: + complianceInfo: + type: array + items: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + softwareVersionString: + type: string + cDVersionNumber: + type: integer + format: int64 + softwareVersionCertificationStatus: + type: integer + format: int64 + date: + type: string + reason: + type: string + owner: + type: string + history: + type: array + items: + type: object + properties: + softwareVersionCertificationStatus: + type: integer + format: int64 + date: + type: string + reason: + type: string + cDVersionNumber: + type: integer + format: int64 + cDCertificateId: + type: string + certificationRoute: + type: string + programType: + type: string + programTypeVersion: + type: string + compliantPlatformUsed: + type: string + compliantPlatformVersion: + type: string + transport: + type: string + familyId: + type: string + supportedClusters: + type: string + OSVersion: + type: string + parentChild: + type: string + certificationIdOfSoftwareComponent: + type: string + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + 
type: string + format: uint64 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/compliance/compliance-info/{vid}/{pid}/{softwareVersion}/{certificationType}: + get: + operationId: ComplianceInfo + responses: + '200': + description: A successful response. + schema: + type: object + properties: + complianceInfo: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + softwareVersionString: + type: string + cDVersionNumber: + type: integer + format: int64 + softwareVersionCertificationStatus: + type: integer + format: int64 + date: + type: string + reason: + type: string + owner: + type: string + history: + type: array + items: + type: object + properties: + softwareVersionCertificationStatus: + type: integer + format: int64 + date: + type: string + reason: + type: string + cDVersionNumber: + type: integer + format: int64 + cDCertificateId: + type: string + certificationRoute: + type: string + programType: + type: string + programTypeVersion: + type: string + compliantPlatformUsed: + type: string + compliantPlatformVersion: + type: string + transport: + type: string + familyId: + type: string + supportedClusters: + type: string + OSVersion: + type: string + 
parentChild: + type: string + certificationIdOfSoftwareComponent: + type: string + schemaVersion: + type: integer + format: int64 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: vid + in: path + required: true + type: integer + format: int32 + - name: pid + in: path + required: true + type: integer + format: int32 + - name: softwareVersion + in: path + required: true + type: integer + format: int64 + - name: certificationType + in: path + required: true + type: string + tags: + - Query + /dcl/compliance/device-software-compliance: + get: + operationId: DeviceSoftwareComplianceAll + responses: + '200': + description: A successful response. + schema: + type: object + properties: + deviceSoftwareCompliance: + type: array + items: + type: object + properties: + cDCertificateId: + type: string + complianceInfo: + type: array + items: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + softwareVersionString: + type: string + cDVersionNumber: + type: integer + format: int64 + softwareVersionCertificationStatus: + type: integer + format: int64 + date: + type: string + reason: + type: string + owner: + type: string + history: + type: array + items: + type: object + properties: + softwareVersionCertificationStatus: + type: integer + format: int64 + date: + type: string + reason: + type: string + cDVersionNumber: + type: integer + format: int64 + cDCertificateId: + type: string + certificationRoute: + type: string + programType: + type: string + programTypeVersion: + type: string + compliantPlatformUsed: + type: string + compliantPlatformVersion: + type: string + transport: + type: 
string + familyId: + type: string + supportedClusters: + type: string + OSVersion: + type: string + parentChild: + type: string + certificationIdOfSoftwareComponent: + type: string + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/compliance/device-software-compliance/{cDCertificateId}: + get: + operationId: DeviceSoftwareCompliance + responses: + '200': + description: A successful response. 
+ schema: + type: object + properties: + deviceSoftwareCompliance: + type: object + properties: + cDCertificateId: + type: string + complianceInfo: + type: array + items: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + softwareVersionString: + type: string + cDVersionNumber: + type: integer + format: int64 + softwareVersionCertificationStatus: + type: integer + format: int64 + date: + type: string + reason: + type: string + owner: + type: string + history: + type: array + items: + type: object + properties: + softwareVersionCertificationStatus: + type: integer + format: int64 + date: + type: string + reason: + type: string + cDVersionNumber: + type: integer + format: int64 + cDCertificateId: + type: string + certificationRoute: + type: string + programType: + type: string + programTypeVersion: + type: string + compliantPlatformUsed: + type: string + compliantPlatformVersion: + type: string + transport: + type: string + familyId: + type: string + supportedClusters: + type: string + OSVersion: + type: string + parentChild: + type: string + certificationIdOfSoftwareComponent: + type: string + schemaVersion: + type: integer + format: int64 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: cDCertificateId + in: path + required: true + type: string + tags: + - Query + /dcl/compliance/provisional-models: + get: + operationId: ProvisionalModelAll + responses: + '200': + description: A successful response. 
+ schema: + type: object + properties: + provisionalModel: + type: array + items: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + value: + type: boolean + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/compliance/provisional-models/{vid}/{pid}/{softwareVersion}/{certificationType}: + get: + operationId: ProvisionalModel + responses: + '200': + description: A successful response. + schema: + type: object + properties: + provisionalModel: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + value: + type: boolean + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: vid + in: path + required: true + type: integer + format: int32 + - name: pid + in: path + required: true + type: integer + format: int32 + - name: softwareVersion + in: path + required: true + type: integer + format: int64 + - name: certificationType + in: path + required: true + type: string + tags: + - Query + /dcl/compliance/revoked-models: + get: + operationId: RevokedModelAll + responses: + '200': + description: A successful response. + schema: + type: object + properties: + revokedModel: + type: array + items: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + value: + type: boolean + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/compliance/revoked-models/{vid}/{pid}/{softwareVersion}/{certificationType}: + get: + operationId: RevokedModel + responses: + '200': + description: A successful response. + schema: + type: object + properties: + revokedModel: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + value: + type: boolean + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: vid + in: path + required: true + type: integer + format: int32 + - name: pid + in: path + required: true + type: integer + format: int32 + - name: softwareVersion + in: path + required: true + type: integer + format: int64 + - name: certificationType + in: path + required: true + type: string + tags: + - Query +definitions: + Any: + type: object + properties: + '@type': + type: string + additionalProperties: {} + Status: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + ComplianceHistoryItem: + type: object + properties: + softwareVersionCertificationStatus: + type: integer + format: int64 + date: + type: string + reason: + type: string + cDVersionNumber: + type: integer + format: int64 + PageRequest: + type: object + properties: + key: + type: string + format: byte + offset: + type: string + format: uint64 + limit: + type: string + format: uint64 + count_total: + type: boolean + reverse: + type: boolean + PageResponse: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllCertifiedModelResponse: + type: object + properties: + certifiedModel: + type: array + items: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + value: + type: boolean + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllComplianceInfoResponse: + type: object + properties: + complianceInfo: + type: array 
+ items: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + softwareVersionString: + type: string + cDVersionNumber: + type: integer + format: int64 + softwareVersionCertificationStatus: + type: integer + format: int64 + date: + type: string + reason: + type: string + owner: + type: string + history: + type: array + items: + type: object + properties: + softwareVersionCertificationStatus: + type: integer + format: int64 + date: + type: string + reason: + type: string + cDVersionNumber: + type: integer + format: int64 + cDCertificateId: + type: string + certificationRoute: + type: string + programType: + type: string + programTypeVersion: + type: string + compliantPlatformUsed: + type: string + compliantPlatformVersion: + type: string + transport: + type: string + familyId: + type: string + supportedClusters: + type: string + OSVersion: + type: string + parentChild: + type: string + certificationIdOfSoftwareComponent: + type: string + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllDeviceSoftwareComplianceResponse: + type: object + properties: + deviceSoftwareCompliance: + type: array + items: + type: object + properties: + cDCertificateId: + type: string + complianceInfo: + type: array + items: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + softwareVersionString: + type: string + cDVersionNumber: + type: integer + format: int64 + softwareVersionCertificationStatus: + type: integer + format: int64 + date: + type: string + reason: + type: string + owner: + type: string + history: + type: array + items: + type: object + properties: + 
softwareVersionCertificationStatus: + type: integer + format: int64 + date: + type: string + reason: + type: string + cDVersionNumber: + type: integer + format: int64 + cDCertificateId: + type: string + certificationRoute: + type: string + programType: + type: string + programTypeVersion: + type: string + compliantPlatformUsed: + type: string + compliantPlatformVersion: + type: string + transport: + type: string + familyId: + type: string + supportedClusters: + type: string + OSVersion: + type: string + parentChild: + type: string + certificationIdOfSoftwareComponent: + type: string + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllProvisionalModelResponse: + type: object + properties: + provisionalModel: + type: array + items: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + value: + type: boolean + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllRevokedModelResponse: + type: object + properties: + revokedModel: + type: array + items: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + value: + type: boolean + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryGetCertifiedModelResponse: + type: object + properties: + certifiedModel: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + value: + type: boolean + QueryGetComplianceInfoResponse: + 
type: object + properties: + complianceInfo: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + softwareVersionString: + type: string + cDVersionNumber: + type: integer + format: int64 + softwareVersionCertificationStatus: + type: integer + format: int64 + date: + type: string + reason: + type: string + owner: + type: string + history: + type: array + items: + type: object + properties: + softwareVersionCertificationStatus: + type: integer + format: int64 + date: + type: string + reason: + type: string + cDVersionNumber: + type: integer + format: int64 + cDCertificateId: + type: string + certificationRoute: + type: string + programType: + type: string + programTypeVersion: + type: string + compliantPlatformUsed: + type: string + compliantPlatformVersion: + type: string + transport: + type: string + familyId: + type: string + supportedClusters: + type: string + OSVersion: + type: string + parentChild: + type: string + certificationIdOfSoftwareComponent: + type: string + schemaVersion: + type: integer + format: int64 + QueryGetDeviceSoftwareComplianceResponse: + type: object + properties: + deviceSoftwareCompliance: + type: object + properties: + cDCertificateId: + type: string + complianceInfo: + type: array + items: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + softwareVersionString: + type: string + cDVersionNumber: + type: integer + format: int64 + softwareVersionCertificationStatus: + type: integer + format: int64 + date: + type: string + reason: + type: string + owner: + type: string + history: + type: array + items: + type: object + properties: + softwareVersionCertificationStatus: + type: integer + format: int64 + date: + type: string + reason: + type: string + 
cDVersionNumber: + type: integer + format: int64 + cDCertificateId: + type: string + certificationRoute: + type: string + programType: + type: string + programTypeVersion: + type: string + compliantPlatformUsed: + type: string + compliantPlatformVersion: + type: string + transport: + type: string + familyId: + type: string + supportedClusters: + type: string + OSVersion: + type: string + parentChild: + type: string + certificationIdOfSoftwareComponent: + type: string + schemaVersion: + type: integer + format: int64 + QueryGetProvisionalModelResponse: + type: object + properties: + provisionalModel: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + value: + type: boolean + QueryGetRevokedModelResponse: + type: object + properties: + revokedModel: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + value: + type: boolean + compliance.CertifiedModel: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + value: + type: boolean + compliance.ComplianceInfo: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + softwareVersionString: + type: string + cDVersionNumber: + type: integer + format: int64 + softwareVersionCertificationStatus: + type: integer + format: int64 + date: + type: string + reason: + type: string + owner: + type: string + history: + type: array + items: + type: object + properties: + softwareVersionCertificationStatus: + type: integer + format: int64 + date: + type: string + reason: + type: 
string + cDVersionNumber: + type: integer + format: int64 + cDCertificateId: + type: string + certificationRoute: + type: string + programType: + type: string + programTypeVersion: + type: string + compliantPlatformUsed: + type: string + compliantPlatformVersion: + type: string + transport: + type: string + familyId: + type: string + supportedClusters: + type: string + OSVersion: + type: string + parentChild: + type: string + certificationIdOfSoftwareComponent: + type: string + schemaVersion: + type: integer + format: int64 + compliance.DeviceSoftwareCompliance: + type: object + properties: + cDCertificateId: + type: string + complianceInfo: + type: array + items: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + softwareVersionString: + type: string + cDVersionNumber: + type: integer + format: int64 + softwareVersionCertificationStatus: + type: integer + format: int64 + date: + type: string + reason: + type: string + owner: + type: string + history: + type: array + items: + type: object + properties: + softwareVersionCertificationStatus: + type: integer + format: int64 + date: + type: string + reason: + type: string + cDVersionNumber: + type: integer + format: int64 + cDCertificateId: + type: string + certificationRoute: + type: string + programType: + type: string + programTypeVersion: + type: string + compliantPlatformUsed: + type: string + compliantPlatformVersion: + type: string + transport: + type: string + familyId: + type: string + supportedClusters: + type: string + OSVersion: + type: string + parentChild: + type: string + certificationIdOfSoftwareComponent: + type: string + schemaVersion: + type: integer + format: int64 + compliance.ProvisionalModel: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + 
certificationType: + type: string + value: + type: boolean + compliance.RevokedModel: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + certificationType: + type: string + value: + type: boolean + MsgCertifyModelResponse: + type: object + MsgDeleteComplianceInfoResponse: + type: object + MsgProvisionModelResponse: + type: object + MsgRevokeModelResponse: + type: object + MsgUpdateComplianceInfoResponse: + type: object diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.compliance/index.ts b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/index.ts new file mode 100755 index 000000000..c9dfa159e --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/index.ts @@ -0,0 +1,6 @@ +import Module from './module'; +import { txClient, queryClient, registry } from './module'; +import { msgTypes } from './registry'; + +export * from "./types"; +export { Module, msgTypes, txClient, queryClient, registry }; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.compliance/module.ts b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/module.ts new file mode 100755 index 000000000..b6054be06 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/module.ts @@ -0,0 +1,273 @@ +// Generated by Ignite ignite.com/cli + +import { StdFee } from "@cosmjs/launchpad"; +import { SigningStargateClient, DeliverTxResponse } from "@cosmjs/stargate"; +import { EncodeObject, GeneratedType, OfflineSigner, Registry } from "@cosmjs/proto-signing"; +import { msgTypes } from './registry'; +import { IgniteClient } from "../client" +import { MissingWalletError } from "../helpers" +import { Api } from "./rest"; +import { MsgRevokeModel } from "./types/zigbeealliance/distributedcomplianceledger/compliance/tx"; +import { MsgCertifyModel } from 
"./types/zigbeealliance/distributedcomplianceledger/compliance/tx"; +import { MsgUpdateComplianceInfo } from "./types/zigbeealliance/distributedcomplianceledger/compliance/tx"; +import { MsgProvisionModel } from "./types/zigbeealliance/distributedcomplianceledger/compliance/tx"; +import { MsgDeleteComplianceInfo } from "./types/zigbeealliance/distributedcomplianceledger/compliance/tx"; + +import { CertifiedModel as typeCertifiedModel} from "./types" +import { ComplianceHistoryItem as typeComplianceHistoryItem} from "./types" +import { ComplianceInfo as typeComplianceInfo} from "./types" +import { DeviceSoftwareCompliance as typeDeviceSoftwareCompliance} from "./types" +import { ProvisionalModel as typeProvisionalModel} from "./types" +import { RevokedModel as typeRevokedModel} from "./types" + +export { MsgRevokeModel, MsgCertifyModel, MsgUpdateComplianceInfo, MsgProvisionModel, MsgDeleteComplianceInfo }; + +type sendMsgRevokeModelParams = { + value: MsgRevokeModel, + fee?: StdFee, + memo?: string +}; + +type sendMsgCertifyModelParams = { + value: MsgCertifyModel, + fee?: StdFee, + memo?: string +}; + +type sendMsgUpdateComplianceInfoParams = { + value: MsgUpdateComplianceInfo, + fee?: StdFee, + memo?: string +}; + +type sendMsgProvisionModelParams = { + value: MsgProvisionModel, + fee?: StdFee, + memo?: string +}; + +type sendMsgDeleteComplianceInfoParams = { + value: MsgDeleteComplianceInfo, + fee?: StdFee, + memo?: string +}; + + +type msgRevokeModelParams = { + value: MsgRevokeModel, +}; + +type msgCertifyModelParams = { + value: MsgCertifyModel, +}; + +type msgUpdateComplianceInfoParams = { + value: MsgUpdateComplianceInfo, +}; + +type msgProvisionModelParams = { + value: MsgProvisionModel, +}; + +type msgDeleteComplianceInfoParams = { + value: MsgDeleteComplianceInfo, +}; + + +export const registry = new Registry(msgTypes); + +type Field = { + name: string; + type: unknown; +} +function getStructure(template) { + const structure: {fields: Field[]} = { fields: 
[] } + for (let [key, value] of Object.entries(template)) { + let field = { name: key, type: typeof value } + structure.fields.push(field) + } + return structure +} +const defaultFee = { + amount: [], + gas: "200000", +}; + +interface TxClientOptions { + addr: string + prefix: string + signer?: OfflineSigner +} + +export const txClient = ({ signer, prefix, addr }: TxClientOptions = { addr: "http://localhost:26657", prefix: "cosmos" }) => { + + return { + + async sendMsgRevokeModel({ value, fee, memo }: sendMsgRevokeModelParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgRevokeModel: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgRevokeModel({ value: MsgRevokeModel.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgRevokeModel: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgCertifyModel({ value, fee, memo }: sendMsgCertifyModelParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgCertifyModel: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgCertifyModel({ value: MsgCertifyModel.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgCertifyModel: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgUpdateComplianceInfo({ value, fee, memo }: sendMsgUpdateComplianceInfoParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgUpdateComplianceInfo: Unable to sign Tx. 
Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgUpdateComplianceInfo({ value: MsgUpdateComplianceInfo.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgUpdateComplianceInfo: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgProvisionModel({ value, fee, memo }: sendMsgProvisionModelParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgProvisionModel: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgProvisionModel({ value: MsgProvisionModel.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgProvisionModel: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgDeleteComplianceInfo({ value, fee, memo }: sendMsgDeleteComplianceInfoParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgDeleteComplianceInfo: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgDeleteComplianceInfo({ value: MsgDeleteComplianceInfo.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? 
fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgDeleteComplianceInfo: Could not broadcast Tx: '+ e.message) + } + }, + + + msgRevokeModel({ value }: msgRevokeModelParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.compliance.MsgRevokeModel", value: MsgRevokeModel.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgRevokeModel: Could not create message: ' + e.message) + } + }, + + msgCertifyModel({ value }: msgCertifyModelParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.compliance.MsgCertifyModel", value: MsgCertifyModel.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgCertifyModel: Could not create message: ' + e.message) + } + }, + + msgUpdateComplianceInfo({ value }: msgUpdateComplianceInfoParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.compliance.MsgUpdateComplianceInfo", value: MsgUpdateComplianceInfo.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgUpdateComplianceInfo: Could not create message: ' + e.message) + } + }, + + msgProvisionModel({ value }: msgProvisionModelParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.compliance.MsgProvisionModel", value: MsgProvisionModel.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgProvisionModel: Could not create message: ' + e.message) + } + }, + + msgDeleteComplianceInfo({ value }: msgDeleteComplianceInfoParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.compliance.MsgDeleteComplianceInfo", value: MsgDeleteComplianceInfo.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgDeleteComplianceInfo: Could not create message: ' + e.message) + } + }, + + } +}; + +interface QueryClientOptions { + addr: string +} + +export const queryClient = 
({ addr: addr }: QueryClientOptions = { addr: "http://localhost:1317" }) => { + return new Api({ baseURL: addr }); +}; + +class SDKModule { + public query: ReturnType; + public tx: ReturnType; + public structure: Record; + public registry: Array<[string, GeneratedType]> = []; + + constructor(client: IgniteClient) { + + this.query = queryClient({ addr: client.env.apiURL }); + this.updateTX(client); + this.structure = { + CertifiedModel: getStructure(typeCertifiedModel.fromPartial({})), + ComplianceHistoryItem: getStructure(typeComplianceHistoryItem.fromPartial({})), + ComplianceInfo: getStructure(typeComplianceInfo.fromPartial({})), + DeviceSoftwareCompliance: getStructure(typeDeviceSoftwareCompliance.fromPartial({})), + ProvisionalModel: getStructure(typeProvisionalModel.fromPartial({})), + RevokedModel: getStructure(typeRevokedModel.fromPartial({})), + + }; + client.on('signer-changed',(signer) => { + this.updateTX(client); + }) + } + updateTX(client: IgniteClient) { + const methods = txClient({ + signer: client.signer, + addr: client.env.rpcURL, + prefix: client.env.prefix ?? 
"cosmos", + }) + + this.tx = methods; + for (let m in methods) { + this.tx[m] = methods[m].bind(this.tx); + } + } +}; + +const Module = (test: IgniteClient) => { + return { + module: { + ZigbeeallianceDistributedcomplianceledgerCompliance: new SDKModule(test) + }, + registry: msgTypes + } +} +export default Module; \ No newline at end of file diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.compliance/registry.ts b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/registry.ts new file mode 100755 index 000000000..56859a84d --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/registry.ts @@ -0,0 +1,17 @@ +import { GeneratedType } from "@cosmjs/proto-signing"; +import { MsgRevokeModel } from "./types/zigbeealliance/distributedcomplianceledger/compliance/tx"; +import { MsgCertifyModel } from "./types/zigbeealliance/distributedcomplianceledger/compliance/tx"; +import { MsgUpdateComplianceInfo } from "./types/zigbeealliance/distributedcomplianceledger/compliance/tx"; +import { MsgProvisionModel } from "./types/zigbeealliance/distributedcomplianceledger/compliance/tx"; +import { MsgDeleteComplianceInfo } from "./types/zigbeealliance/distributedcomplianceledger/compliance/tx"; + +const msgTypes: Array<[string, GeneratedType]> = [ + ["/zigbeealliance.distributedcomplianceledger.compliance.MsgRevokeModel", MsgRevokeModel], + ["/zigbeealliance.distributedcomplianceledger.compliance.MsgCertifyModel", MsgCertifyModel], + ["/zigbeealliance.distributedcomplianceledger.compliance.MsgUpdateComplianceInfo", MsgUpdateComplianceInfo], + ["/zigbeealliance.distributedcomplianceledger.compliance.MsgProvisionModel", MsgProvisionModel], + ["/zigbeealliance.distributedcomplianceledger.compliance.MsgDeleteComplianceInfo", MsgDeleteComplianceInfo], + +]; + +export { msgTypes } \ No newline at end of file diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.compliance/rest.ts 
b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/rest.ts new file mode 100644 index 000000000..8c4a4f930 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/rest.ts @@ -0,0 +1,650 @@ +/* eslint-disable */ +/* tslint:disable */ +/* + * --------------------------------------------------------------- + * ## THIS FILE WAS GENERATED VIA SWAGGER-TYPESCRIPT-API ## + * ## ## + * ## AUTHOR: acacode ## + * ## SOURCE: https://github.com/acacode/swagger-typescript-api ## + * --------------------------------------------------------------- + */ + +export interface ComplianceComplianceHistoryItem { + /** @format int64 */ + softwareVersionCertificationStatus?: number; + date?: string; + reason?: string; + + /** @format int64 */ + cDVersionNumber?: number; +} + +export type ComplianceMsgCertifyModelResponse = object; + +export type ComplianceMsgDeleteComplianceInfoResponse = object; + +export type ComplianceMsgProvisionModelResponse = object; + +export type ComplianceMsgRevokeModelResponse = object; + +export type ComplianceMsgUpdateComplianceInfoResponse = object; + +export interface ComplianceQueryAllCertifiedModelResponse { + certifiedModel?: DistributedcomplianceledgercomplianceCertifiedModel[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface ComplianceQueryAllComplianceInfoResponse { + complianceInfo?: DistributedcomplianceledgercomplianceComplianceInfo[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. 
+ * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface ComplianceQueryAllDeviceSoftwareComplianceResponse { + deviceSoftwareCompliance?: DistributedcomplianceledgercomplianceDeviceSoftwareCompliance[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface ComplianceQueryAllProvisionalModelResponse { + provisionalModel?: DistributedcomplianceledgercomplianceProvisionalModel[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface ComplianceQueryAllRevokedModelResponse { + revokedModel?: DistributedcomplianceledgercomplianceRevokedModel[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. 
+ * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface ComplianceQueryGetCertifiedModelResponse { + certifiedModel?: DistributedcomplianceledgercomplianceCertifiedModel; +} + +export interface ComplianceQueryGetComplianceInfoResponse { + complianceInfo?: DistributedcomplianceledgercomplianceComplianceInfo; +} + +export interface ComplianceQueryGetDeviceSoftwareComplianceResponse { + deviceSoftwareCompliance?: DistributedcomplianceledgercomplianceDeviceSoftwareCompliance; +} + +export interface ComplianceQueryGetProvisionalModelResponse { + provisionalModel?: DistributedcomplianceledgercomplianceProvisionalModel; +} + +export interface ComplianceQueryGetRevokedModelResponse { + revokedModel?: DistributedcomplianceledgercomplianceRevokedModel; +} + +export interface DistributedcomplianceledgercomplianceCertifiedModel { + /** @format int32 */ + vid?: number; + + /** @format int32 */ + pid?: number; + + /** @format int64 */ + softwareVersion?: number; + certificationType?: string; + value?: boolean; +} + +export interface DistributedcomplianceledgercomplianceComplianceInfo { + /** @format int32 */ + vid?: number; + + /** @format int32 */ + pid?: number; + + /** @format int64 */ + softwareVersion?: number; + certificationType?: string; + softwareVersionString?: string; + + /** @format int64 */ + cDVersionNumber?: number; + + /** @format int64 */ + softwareVersionCertificationStatus?: number; + date?: string; + reason?: string; + owner?: string; + history?: ComplianceComplianceHistoryItem[]; + cDCertificateId?: string; + certificationRoute?: string; + programType?: string; + programTypeVersion?: string; + compliantPlatformUsed?: string; + compliantPlatformVersion?: string; + transport?: string; + familyId?: string; + supportedClusters?: string; + OSVersion?: string; + parentChild?: string; + certificationIdOfSoftwareComponent?: string; + + /** @format int64 */ + 
schemaVersion?: number; +} + +export interface DistributedcomplianceledgercomplianceDeviceSoftwareCompliance { + cDCertificateId?: string; + complianceInfo?: DistributedcomplianceledgercomplianceComplianceInfo[]; +} + +export interface DistributedcomplianceledgercomplianceProvisionalModel { + /** @format int32 */ + vid?: number; + + /** @format int32 */ + pid?: number; + + /** @format int64 */ + softwareVersion?: number; + certificationType?: string; + value?: boolean; +} + +export interface DistributedcomplianceledgercomplianceRevokedModel { + /** @format int32 */ + vid?: number; + + /** @format int32 */ + pid?: number; + + /** @format int64 */ + softwareVersion?: number; + certificationType?: string; + value?: boolean; +} + +export interface ProtobufAny { + "@type"?: string; +} + +export interface RpcStatus { + /** @format int32 */ + code?: number; + message?: string; + details?: ProtobufAny[]; +} + +/** +* message SomeRequest { + Foo some_parameter = 1; + PageRequest pagination = 2; + } +*/ +export interface V1Beta1PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. + * @format byte + */ + key?: string; + + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. + * @format uint64 + */ + offset?: string; + + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. + * @format uint64 + */ + limit?: string; + + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. 
+ */ + count_total?: boolean; + + /** + * reverse is set to true if results are to be returned in the descending order. + * + * Since: cosmos-sdk 0.43 + */ + reverse?: boolean; +} + +/** +* PageResponse is to be embedded in gRPC response messages where the +corresponding request message has used PageRequest. + + message SomeResponse { + repeated Bar results = 1; + PageResponse page = 2; + } +*/ +export interface V1Beta1PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. + * @format byte + */ + next_key?: string; + + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + * @format uint64 + */ + total?: string; +} + +import axios, { AxiosInstance, AxiosRequestConfig, AxiosResponse, ResponseType } from "axios"; + +export type QueryParamsType = Record; + +export interface FullRequestParams extends Omit { + /** set parameter to `true` for call `securityWorker` for this request */ + secure?: boolean; + /** request path */ + path: string; + /** content type of request body */ + type?: ContentType; + /** query params */ + query?: QueryParamsType; + /** format of response (i.e. 
response.json() -> format: "json") */ + format?: ResponseType; + /** request body */ + body?: unknown; +} + +export type RequestParams = Omit; + +export interface ApiConfig extends Omit { + securityWorker?: ( + securityData: SecurityDataType | null, + ) => Promise | AxiosRequestConfig | void; + secure?: boolean; + format?: ResponseType; +} + +export enum ContentType { + Json = "application/json", + FormData = "multipart/form-data", + UrlEncoded = "application/x-www-form-urlencoded", +} + +export class HttpClient { + public instance: AxiosInstance; + private securityData: SecurityDataType | null = null; + private securityWorker?: ApiConfig["securityWorker"]; + private secure?: boolean; + private format?: ResponseType; + + constructor({ securityWorker, secure, format, ...axiosConfig }: ApiConfig = {}) { + this.instance = axios.create({ ...axiosConfig, baseURL: axiosConfig.baseURL || "" }); + this.secure = secure; + this.format = format; + this.securityWorker = securityWorker; + } + + public setSecurityData = (data: SecurityDataType | null) => { + this.securityData = data; + }; + + private mergeRequestParams(params1: AxiosRequestConfig, params2?: AxiosRequestConfig): AxiosRequestConfig { + return { + ...this.instance.defaults, + ...params1, + ...(params2 || {}), + headers: { + ...(this.instance.defaults.headers || {}), + ...(params1.headers || {}), + ...((params2 && params2.headers) || {}), + }, + }; + } + + private createFormData(input: Record): FormData { + return Object.keys(input || {}).reduce((formData, key) => { + const property = input[key]; + formData.append( + key, + property instanceof Blob + ? property + : typeof property === "object" && property !== null + ? JSON.stringify(property) + : `${property}`, + ); + return formData; + }, new FormData()); + } + + public request = async ({ + secure, + path, + type, + query, + format, + body, + ...params + }: FullRequestParams): Promise> => { + const secureParams = + ((typeof secure === "boolean" ? 
secure : this.secure) && + this.securityWorker && + (await this.securityWorker(this.securityData))) || + {}; + const requestParams = this.mergeRequestParams(params, secureParams); + const responseFormat = (format && this.format) || void 0; + + if (type === ContentType.FormData && body && body !== null && typeof body === "object") { + requestParams.headers.common = { Accept: "*/*" }; + requestParams.headers.post = {}; + requestParams.headers.put = {}; + + body = this.createFormData(body as Record); + } + + return this.instance.request({ + ...requestParams, + headers: { + ...(type && type !== ContentType.FormData ? { "Content-Type": type } : {}), + ...(requestParams.headers || {}), + }, + params: query, + responseType: responseFormat, + data: body, + url: path, + }); + }; +} + +/** + * @title zigbeealliance/distributedcomplianceledger/compliance/certified_model.proto + * @version version not set + */ +export class Api extends HttpClient { + /** + * No description + * + * @tags Query + * @name QueryCertifiedModelAll + * @summary Queries a list of CertifiedModel items. + * @request GET:/dcl/compliance/certified-models + */ + queryCertifiedModelAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/compliance/certified-models`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryCertifiedModel + * @summary Queries a CertifiedModel by index. 
+ * @request GET:/dcl/compliance/certified-models/{vid}/{pid}/{softwareVersion}/{certificationType} + */ + queryCertifiedModel = ( + vid: number, + pid: number, + softwareVersion: number, + certificationType: string, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/compliance/certified-models/${vid}/${pid}/${softwareVersion}/${certificationType}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryComplianceInfoAll + * @summary Queries a list of ComplianceInfo items. + * @request GET:/dcl/compliance/compliance-info + */ + queryComplianceInfoAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/compliance/compliance-info`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryComplianceInfo + * @summary Queries a ComplianceInfo by index. + * @request GET:/dcl/compliance/compliance-info/{vid}/{pid}/{softwareVersion}/{certificationType} + */ + queryComplianceInfo = ( + vid: number, + pid: number, + softwareVersion: number, + certificationType: string, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/compliance/compliance-info/${vid}/${pid}/${softwareVersion}/${certificationType}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryDeviceSoftwareComplianceAll + * @summary Queries a list of DeviceSoftwareCompliance items. 
+ * @request GET:/dcl/compliance/device-software-compliance + */ + queryDeviceSoftwareComplianceAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/compliance/device-software-compliance`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryDeviceSoftwareCompliance + * @summary Queries a DeviceSoftwareCompliance by index. + * @request GET:/dcl/compliance/device-software-compliance/{cDCertificateId} + */ + queryDeviceSoftwareCompliance = (cDCertificateId: string, params: RequestParams = {}) => + this.request({ + path: `/dcl/compliance/device-software-compliance/${cDCertificateId}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryProvisionalModelAll + * @summary Queries a list of ProvisionalModel items. + * @request GET:/dcl/compliance/provisional-models + */ + queryProvisionalModelAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/compliance/provisional-models`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryProvisionalModel + * @summary Queries a ProvisionalModel by index. 
+ * @request GET:/dcl/compliance/provisional-models/{vid}/{pid}/{softwareVersion}/{certificationType} + */ + queryProvisionalModel = ( + vid: number, + pid: number, + softwareVersion: number, + certificationType: string, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/compliance/provisional-models/${vid}/${pid}/${softwareVersion}/${certificationType}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryRevokedModelAll + * @summary Queries a list of RevokedModel items. + * @request GET:/dcl/compliance/revoked-models + */ + queryRevokedModelAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/compliance/revoked-models`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryRevokedModel + * @summary Queries a RevokedModel by index. 
+ * @request GET:/dcl/compliance/revoked-models/{vid}/{pid}/{softwareVersion}/{certificationType} + */ + queryRevokedModel = ( + vid: number, + pid: number, + softwareVersion: number, + certificationType: string, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/compliance/revoked-models/${vid}/${pid}/${softwareVersion}/${certificationType}`, + method: "GET", + format: "json", + ...params, + }); +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types.ts b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types.ts new file mode 100755 index 000000000..00e60b529 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types.ts @@ -0,0 +1,17 @@ +import { CertifiedModel } from "./types/zigbeealliance/distributedcomplianceledger/compliance/certified_model" +import { ComplianceHistoryItem } from "./types/zigbeealliance/distributedcomplianceledger/compliance/compliance_history_item" +import { ComplianceInfo } from "./types/zigbeealliance/distributedcomplianceledger/compliance/compliance_info" +import { DeviceSoftwareCompliance } from "./types/zigbeealliance/distributedcomplianceledger/compliance/device_software_compliance" +import { ProvisionalModel } from "./types/zigbeealliance/distributedcomplianceledger/compliance/provisional_model" +import { RevokedModel } from "./types/zigbeealliance/distributedcomplianceledger/compliance/revoked_model" + + +export { + CertifiedModel, + ComplianceHistoryItem, + ComplianceInfo, + DeviceSoftwareCompliance, + ProvisionalModel, + RevokedModel, + + } \ No newline at end of file diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/cosmos/base/query/v1beta1/pagination.ts b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/cosmos/base/query/v1beta1/pagination.ts new file mode 100644 index 000000000..a31ca249d --- /dev/null +++ 
b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/cosmos/base/query/v1beta1/pagination.ts @@ -0,0 +1,286 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos.base.query.v1beta1"; + +/** + * PageRequest is to be embedded in gRPC request messages for efficient + * pagination. Ex: + * + * message SomeRequest { + * Foo some_parameter = 1; + * PageRequest pagination = 2; + * } + */ +export interface PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. + */ + key: Uint8Array; + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. + */ + offset: number; + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. + */ + limit: number; + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. + */ + countTotal: boolean; + /** + * reverse is set to true if results are to be returned in the descending order. + * + * Since: cosmos-sdk 0.43 + */ + reverse: boolean; +} + +/** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ +export interface PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. 
+ */ + nextKey: Uint8Array; + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + */ + total: number; +} + +function createBasePageRequest(): PageRequest { + return { key: new Uint8Array(), offset: 0, limit: 0, countTotal: false, reverse: false }; +} + +export const PageRequest = { + encode(message: PageRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + if (message.offset !== 0) { + writer.uint32(16).uint64(message.offset); + } + if (message.limit !== 0) { + writer.uint32(24).uint64(message.limit); + } + if (message.countTotal === true) { + writer.uint32(32).bool(message.countTotal); + } + if (message.reverse === true) { + writer.uint32(40).bool(message.reverse); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PageRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePageRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + case 2: + message.offset = longToNumber(reader.uint64() as Long); + break; + case 3: + message.limit = longToNumber(reader.uint64() as Long); + break; + case 4: + message.countTotal = reader.bool(); + break; + case 5: + message.reverse = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PageRequest { + return { + key: isSet(object.key) ? bytesFromBase64(object.key) : new Uint8Array(), + offset: isSet(object.offset) ? Number(object.offset) : 0, + limit: isSet(object.limit) ? Number(object.limit) : 0, + countTotal: isSet(object.countTotal) ? Boolean(object.countTotal) : false, + reverse: isSet(object.reverse) ? 
Boolean(object.reverse) : false, + }; + }, + + toJSON(message: PageRequest): unknown { + const obj: any = {}; + message.key !== undefined + && (obj.key = base64FromBytes(message.key !== undefined ? message.key : new Uint8Array())); + message.offset !== undefined && (obj.offset = Math.round(message.offset)); + message.limit !== undefined && (obj.limit = Math.round(message.limit)); + message.countTotal !== undefined && (obj.countTotal = message.countTotal); + message.reverse !== undefined && (obj.reverse = message.reverse); + return obj; + }, + + fromPartial, I>>(object: I): PageRequest { + const message = createBasePageRequest(); + message.key = object.key ?? new Uint8Array(); + message.offset = object.offset ?? 0; + message.limit = object.limit ?? 0; + message.countTotal = object.countTotal ?? false; + message.reverse = object.reverse ?? false; + return message; + }, +}; + +function createBasePageResponse(): PageResponse { + return { nextKey: new Uint8Array(), total: 0 }; +} + +export const PageResponse = { + encode(message: PageResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.nextKey.length !== 0) { + writer.uint32(10).bytes(message.nextKey); + } + if (message.total !== 0) { + writer.uint32(16).uint64(message.total); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PageResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePageResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.nextKey = reader.bytes(); + break; + case 2: + message.total = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PageResponse { + return { + nextKey: isSet(object.nextKey) ? 
bytesFromBase64(object.nextKey) : new Uint8Array(), + total: isSet(object.total) ? Number(object.total) : 0, + }; + }, + + toJSON(message: PageResponse): unknown { + const obj: any = {}; + message.nextKey !== undefined + && (obj.nextKey = base64FromBytes(message.nextKey !== undefined ? message.nextKey : new Uint8Array())); + message.total !== undefined && (obj.total = Math.round(message.total)); + return obj; + }, + + fromPartial, I>>(object: I): PageResponse { + const message = createBasePageResponse(); + message.nextKey = object.nextKey ?? new Uint8Array(); + message.total = object.total ?? 0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? 
{ [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/cosmos_proto/cosmos.ts b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/cosmos_proto/cosmos.ts new file mode 100644 index 000000000..79c8d3486 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/cosmos_proto/cosmos.ts @@ -0,0 +1,247 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos_proto"; + +export enum ScalarType { + SCALAR_TYPE_UNSPECIFIED = 0, + SCALAR_TYPE_STRING = 1, + SCALAR_TYPE_BYTES = 2, + UNRECOGNIZED = -1, +} + +export function scalarTypeFromJSON(object: any): ScalarType { + switch (object) { + case 0: + case "SCALAR_TYPE_UNSPECIFIED": + return ScalarType.SCALAR_TYPE_UNSPECIFIED; + case 1: + case "SCALAR_TYPE_STRING": + return ScalarType.SCALAR_TYPE_STRING; + case 2: + case "SCALAR_TYPE_BYTES": + return ScalarType.SCALAR_TYPE_BYTES; + case -1: + case "UNRECOGNIZED": + default: + return ScalarType.UNRECOGNIZED; + } +} + +export function scalarTypeToJSON(object: ScalarType): string { + switch (object) { + case ScalarType.SCALAR_TYPE_UNSPECIFIED: + return "SCALAR_TYPE_UNSPECIFIED"; + case ScalarType.SCALAR_TYPE_STRING: + return "SCALAR_TYPE_STRING"; + case ScalarType.SCALAR_TYPE_BYTES: + return "SCALAR_TYPE_BYTES"; + case ScalarType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } 
+} + +/** + * InterfaceDescriptor describes an interface type to be used with + * accepts_interface and implements_interface and declared by declare_interface. + */ +export interface InterfaceDescriptor { + /** + * name is the name of the interface. It should be a short-name (without + * a period) such that the fully qualified name of the interface will be + * package.name, ex. for the package a.b and interface named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the interface and its + * purpose. + */ + description: string; +} + +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. + * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. + * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ +export interface ScalarDescriptor { + /** + * name is the name of the scalar. It should be a short-name (without + * a period) such that the fully qualified name of the scalar will be + * package.name, ex. for the package a.b and scalar named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the scalar and its + * encoding format. For instance a big integer or decimal scalar should + * specify precisely the expected encoding format. + */ + description: string; + /** + * field_type is the type of field with which this scalar can be used. + * Scalars can be used with one and only one type of field so that + * encoding standards and simple and clear. Currently only string and + * bytes fields are supported for scalars. 
+ */ + fieldType: ScalarType[]; +} + +function createBaseInterfaceDescriptor(): InterfaceDescriptor { + return { name: "", description: "" }; +} + +export const InterfaceDescriptor = { + encode(message: InterfaceDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseInterfaceDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): InterfaceDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + }; + }, + + toJSON(message: InterfaceDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + return obj; + }, + + fromPartial, I>>(object: I): InterfaceDescriptor { + const message = createBaseInterfaceDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? 
""; + return message; + }, +}; + +function createBaseScalarDescriptor(): ScalarDescriptor { + return { name: "", description: "", fieldType: [] }; +} + +export const ScalarDescriptor = { + encode(message: ScalarDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + writer.uint32(26).fork(); + for (const v of message.fieldType) { + writer.int32(v); + } + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ScalarDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseScalarDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.fieldType.push(reader.int32() as any); + } + } else { + message.fieldType.push(reader.int32() as any); + } + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ScalarDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + fieldType: Array.isArray(object?.fieldType) ? 
object.fieldType.map((e: any) => scalarTypeFromJSON(e)) : [], + }; + }, + + toJSON(message: ScalarDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + if (message.fieldType) { + obj.fieldType = message.fieldType.map((e) => scalarTypeToJSON(e)); + } else { + obj.fieldType = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ScalarDescriptor { + const message = createBaseScalarDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? ""; + message.fieldType = object.fieldType?.map((e) => e) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/gogoproto/gogo.ts b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/gogoproto/gogo.ts new file mode 100644 index 000000000..3f41a047a --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/gogoproto/gogo.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "gogoproto"; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/google/api/annotations.ts b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/google/api/annotations.ts new file mode 100644 index 000000000..aace4787f --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/google/api/annotations.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "google.api"; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/google/api/http.ts b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/google/api/http.ts new file mode 100644 index 000000000..17e770d15 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/google/api/http.ts @@ -0,0 +1,589 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.api"; + +/** + * Defines the HTTP configuration for an API service. It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface Http { + /** + * A list of HTTP configuration rules that apply to individual API methods. + * + * **NOTE:** All service configuration rules follow "last one wins" order. 
+ */ + rules: HttpRule[]; + /** + * When set to true, URL path parmeters will be fully URI-decoded except in + * cases of single segment matches in reserved expansion, where "%2F" will be + * left encoded. + * + * The default behavior is to not decode RFC 6570 reserved characters in multi + * segment matches. + */ + fullyDecodeReservedExpansion: boolean; +} + +/** + * `HttpRule` defines the mapping of an RPC method to one or more HTTP + * REST API methods. The mapping specifies how different portions of the RPC + * request message are mapped to URL path, URL query parameters, and + * HTTP request body. The mapping is typically specified as an + * `google.api.http` annotation on the RPC method, + * see "google/api/annotations.proto" for details. + * + * The mapping consists of a field specifying the path template and + * method kind. The path template can refer to fields in the request + * message, as in the example below which describes a REST GET + * operation on a resource collection of messages: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}/{sub.subfield}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * SubMessage sub = 2; // `sub.subfield` is url-mapped + * } + * message Message { + * string text = 1; // content of the resource + * } + * + * The same http annotation can alternatively be expressed inside the + * `GRPC API Configuration` YAML file. + * + * http: + * rules: + * - selector: .Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * This definition enables an automatic, bidrectional mapping of HTTP + * JSON to RPC. 
Example: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456/foo` | `GetMessage(message_id: "123456" sub: SubMessage(subfield: "foo"))` + * + * In general, not only fields but also field paths can be referenced + * from a path pattern. Fields mapped to the path pattern cannot be + * repeated and must have a primitive (non-message) type. + * + * Any fields in the request message which are not bound by the path + * pattern automatically become (optional) HTTP query + * parameters. Assume the following definition of the request message: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * int64 revision = 2; // becomes a parameter + * SubMessage sub = 3; // `sub.subfield` becomes a parameter + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456?revision=2&sub.subfield=foo` | `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: "foo"))` + * + * Note that fields which are mapped to HTTP parameters must have a + * primitive type or a repeated primitive type. Message types are not + * allowed. In the case of a repeated type, the parameter can be + * repeated in the URL, as in `...?param=A¶m=B`. + * + * For HTTP method kinds which allow a request body, the `body` field + * specifies the mapping. 
Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * The following HTTP JSON to RPC mapping is enabled: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice of + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. 
Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC + * mappings: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` + * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: "123456")` + * + * # Rules for HTTP mapping + * + * The rules for mapping HTTP path, query parameters, and body fields + * to the request message are as follows: + * + * 1. The `body` field specifies either `*` or a field path, or is + * omitted. If omitted, it indicates there is no HTTP request body. + * 2. Leaf fields (recursive expansion of nested messages in the + * request) can be classified into three types: + * (a) Matched in the URL template. + * (b) Covered by body (if body is `*`, everything except (a) fields; + * else everything under the body field) + * (c) All other fields. + * 3. URL query parameters found in the HTTP request are mapped to (c) fields. + * 4. Any body sent with an HTTP request can contain only (b) fields. + * + * The syntax of the path template is as follows: + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single path segment. The syntax `**` matches zero + * or more path segments, which must be the last part of the path except the + * `Verb`. The syntax `LITERAL` matches literal text in the path. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. 
A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path, all characters + * except `[-_.~0-9a-zA-Z]` are percent-encoded. Such variables show up in the + * Discovery Document as `{var}`. + * + * If a variable contains one or more path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path, all + * characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. Such variables + * show up in the Discovery Document as `{+var}`. + * + * NOTE: While the single segment variable matches the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 + * Simple String Expansion, the multi segment variable **does not** match + * RFC 6570 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. + * + * NOTE: the field paths in variables and in the `body` must not refer to + * repeated fields or map fields. + */ +export interface HttpRule { + /** + * Selects methods to which this rule applies. + * + * Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + */ + selector: string; + /** Used for listing and getting information about resources. */ + get: + | string + | undefined; + /** Used for updating a resource. */ + put: + | string + | undefined; + /** Used for creating a resource. */ + post: + | string + | undefined; + /** Used for deleting a resource. */ + delete: + | string + | undefined; + /** Used for updating a resource. 
*/ + patch: + | string + | undefined; + /** + * The custom pattern is used for specifying an HTTP method that is not + * included in the `pattern` field, such as HEAD, or "*" to leave the + * HTTP method unspecified for this rule. The wild-card rule is useful + * for services that provide content to Web (HTML) clients. + */ + custom: + | CustomHttpPattern + | undefined; + /** + * The name of the request field whose value is mapped to the HTTP body, or + * `*` for mapping all fields not captured by the path pattern to the HTTP + * body. NOTE: the referred field must not be a repeated field and must be + * present at the top-level of request message type. + */ + body: string; + /** + * Optional. The name of the response field whose value is mapped to the HTTP + * body of response. Other response fields are ignored. When + * not set, the response message will be used as HTTP body of response. + */ + responseBody: string; + /** + * Additional HTTP bindings for the selector. Nested bindings must + * not contain an `additional_bindings` field themselves (that is, + * the nesting may only be one level deep). + */ + additionalBindings: HttpRule[]; +} + +/** A custom pattern is used for defining custom HTTP verb. */ +export interface CustomHttpPattern { + /** The name of this custom HTTP verb. */ + kind: string; + /** The path matched by this custom verb. */ + path: string; +} + +function createBaseHttp(): Http { + return { rules: [], fullyDecodeReservedExpansion: false }; +} + +export const Http = { + encode(message: Http, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rules) { + HttpRule.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.fullyDecodeReservedExpansion === true) { + writer.uint32(16).bool(message.fullyDecodeReservedExpansion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Http { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rules.push(HttpRule.decode(reader, reader.uint32())); + break; + case 2: + message.fullyDecodeReservedExpansion = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Http { + return { + rules: Array.isArray(object?.rules) ? object.rules.map((e: any) => HttpRule.fromJSON(e)) : [], + fullyDecodeReservedExpansion: isSet(object.fullyDecodeReservedExpansion) + ? Boolean(object.fullyDecodeReservedExpansion) + : false, + }; + }, + + toJSON(message: Http): unknown { + const obj: any = {}; + if (message.rules) { + obj.rules = message.rules.map((e) => e ? HttpRule.toJSON(e) : undefined); + } else { + obj.rules = []; + } + message.fullyDecodeReservedExpansion !== undefined + && (obj.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion); + return obj; + }, + + fromPartial, I>>(object: I): Http { + const message = createBaseHttp(); + message.rules = object.rules?.map((e) => HttpRule.fromPartial(e)) || []; + message.fullyDecodeReservedExpansion = object.fullyDecodeReservedExpansion ?? 
false; + return message; + }, +}; + +function createBaseHttpRule(): HttpRule { + return { + selector: "", + get: undefined, + put: undefined, + post: undefined, + delete: undefined, + patch: undefined, + custom: undefined, + body: "", + responseBody: "", + additionalBindings: [], + }; +} + +export const HttpRule = { + encode(message: HttpRule, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.selector !== "") { + writer.uint32(10).string(message.selector); + } + if (message.get !== undefined) { + writer.uint32(18).string(message.get); + } + if (message.put !== undefined) { + writer.uint32(26).string(message.put); + } + if (message.post !== undefined) { + writer.uint32(34).string(message.post); + } + if (message.delete !== undefined) { + writer.uint32(42).string(message.delete); + } + if (message.patch !== undefined) { + writer.uint32(50).string(message.patch); + } + if (message.custom !== undefined) { + CustomHttpPattern.encode(message.custom, writer.uint32(66).fork()).ldelim(); + } + if (message.body !== "") { + writer.uint32(58).string(message.body); + } + if (message.responseBody !== "") { + writer.uint32(98).string(message.responseBody); + } + for (const v of message.additionalBindings) { + HttpRule.encode(v!, writer.uint32(90).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): HttpRule { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseHttpRule(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.selector = reader.string(); + break; + case 2: + message.get = reader.string(); + break; + case 3: + message.put = reader.string(); + break; + case 4: + message.post = reader.string(); + break; + case 5: + message.delete = reader.string(); + break; + case 6: + message.patch = reader.string(); + break; + case 8: + message.custom = CustomHttpPattern.decode(reader, reader.uint32()); + break; + case 7: + message.body = reader.string(); + break; + case 12: + message.responseBody = reader.string(); + break; + case 11: + message.additionalBindings.push(HttpRule.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): HttpRule { + return { + selector: isSet(object.selector) ? String(object.selector) : "", + get: isSet(object.get) ? String(object.get) : undefined, + put: isSet(object.put) ? String(object.put) : undefined, + post: isSet(object.post) ? String(object.post) : undefined, + delete: isSet(object.delete) ? String(object.delete) : undefined, + patch: isSet(object.patch) ? String(object.patch) : undefined, + custom: isSet(object.custom) ? CustomHttpPattern.fromJSON(object.custom) : undefined, + body: isSet(object.body) ? String(object.body) : "", + responseBody: isSet(object.responseBody) ? String(object.responseBody) : "", + additionalBindings: Array.isArray(object?.additionalBindings) + ? 
object.additionalBindings.map((e: any) => HttpRule.fromJSON(e)) + : [], + }; + }, + + toJSON(message: HttpRule): unknown { + const obj: any = {}; + message.selector !== undefined && (obj.selector = message.selector); + message.get !== undefined && (obj.get = message.get); + message.put !== undefined && (obj.put = message.put); + message.post !== undefined && (obj.post = message.post); + message.delete !== undefined && (obj.delete = message.delete); + message.patch !== undefined && (obj.patch = message.patch); + message.custom !== undefined + && (obj.custom = message.custom ? CustomHttpPattern.toJSON(message.custom) : undefined); + message.body !== undefined && (obj.body = message.body); + message.responseBody !== undefined && (obj.responseBody = message.responseBody); + if (message.additionalBindings) { + obj.additionalBindings = message.additionalBindings.map((e) => e ? HttpRule.toJSON(e) : undefined); + } else { + obj.additionalBindings = []; + } + return obj; + }, + + fromPartial, I>>(object: I): HttpRule { + const message = createBaseHttpRule(); + message.selector = object.selector ?? ""; + message.get = object.get ?? undefined; + message.put = object.put ?? undefined; + message.post = object.post ?? undefined; + message.delete = object.delete ?? undefined; + message.patch = object.patch ?? undefined; + message.custom = (object.custom !== undefined && object.custom !== null) + ? CustomHttpPattern.fromPartial(object.custom) + : undefined; + message.body = object.body ?? ""; + message.responseBody = object.responseBody ?? 
""; + message.additionalBindings = object.additionalBindings?.map((e) => HttpRule.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseCustomHttpPattern(): CustomHttpPattern { + return { kind: "", path: "" }; +} + +export const CustomHttpPattern = { + encode(message: CustomHttpPattern, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.kind !== "") { + writer.uint32(10).string(message.kind); + } + if (message.path !== "") { + writer.uint32(18).string(message.path); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CustomHttpPattern { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCustomHttpPattern(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.kind = reader.string(); + break; + case 2: + message.path = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): CustomHttpPattern { + return { kind: isSet(object.kind) ? String(object.kind) : "", path: isSet(object.path) ? String(object.path) : "" }; + }, + + toJSON(message: CustomHttpPattern): unknown { + const obj: any = {}; + message.kind !== undefined && (obj.kind = message.kind); + message.path !== undefined && (obj.path = message.path); + return obj; + }, + + fromPartial, I>>(object: I): CustomHttpPattern { + const message = createBaseCustomHttpPattern(); + message.kind = object.kind ?? ""; + message.path = object.path ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? 
keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/google/protobuf/descriptor.ts b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/google/protobuf/descriptor.ts new file mode 100644 index 000000000..8fb7bb24c --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/google/protobuf/descriptor.ts @@ -0,0 +1,3753 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSet { + file: FileDescriptorProto[]; +} + +/** Describes a complete .proto file. */ +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name: string; + /** e.g. "foo", "foo.bar", etc. */ + package: string; + /** Names of files imported by this file. */ + dependency: string[]; + /** Indexes of the public imported files in the dependency list above. */ + publicDependency: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weakDependency: number[]; + /** All top-level definitions in this file. */ + messageType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + service: ServiceDescriptorProto[]; + extension: FieldDescriptorProto[]; + options: + | FileOptions + | undefined; + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. 
+ */ + sourceCodeInfo: + | SourceCodeInfo + | undefined; + /** + * The syntax of the proto file. + * The supported values are "proto2" and "proto3". + */ + syntax: string; +} + +/** Describes a message type. */ +export interface DescriptorProto { + name: string; + field: FieldDescriptorProto[]; + extension: FieldDescriptorProto[]; + nestedType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + extensionRange: DescriptorProto_ExtensionRange[]; + oneofDecl: OneofDescriptorProto[]; + options: MessageOptions | undefined; + reservedRange: DescriptorProto_ReservedRange[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reservedName: string[]; +} + +export interface DescriptorProto_ExtensionRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; + options: ExtensionRangeOptions | undefined; +} + +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; +} + +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Describes a field within a message. */ +export interface FieldDescriptorProto { + name: string; + number: number; + label: FieldDescriptorProto_Label; + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type: FieldDescriptorProto_Type; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. 
first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). + */ + typeName: string; + /** + * For extensions, this is the name of the type being extended. It is + * resolved in the same manner as type_name. + */ + extendee: string; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + * TODO(kenton): Base-64 encode? + */ + defaultValue: string; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneofIndex: number; + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + jsonName: string; + options: + | FieldOptions + | undefined; + /** + * If true, this is a proto3 "optional". When a proto3 field is optional, it + * tracks presence regardless of field type. + * + * When proto3_optional is true, this field must be belong to a oneof to + * signal to old proto3 clients that presence is tracked for this field. This + * oneof is known as a "synthetic" oneof, and this field must be its sole + * member (each proto3 optional field gets its own synthetic oneof). Synthetic + * oneofs exist in the descriptor only, and do not generate any API. Synthetic + * oneofs must be ordered after all "real" oneofs. + * + * For message fields, proto3_optional doesn't create any semantic change, + * since non-repeated message fields always track presence. However it still + * indicates the semantic detail of whether the user wrote "optional" or not. + * This can be useful for round-tripping the .proto file. 
For consistency we + * give message fields a synthetic oneof also, even though it is not required + * to track presence. This is especially important because the parser can't + * tell if a field is a message or an enum, so it must always create a + * synthetic oneof. + * + * Proto2 optional fields do not set this flag, because they already indicate + * optional with `LABEL_OPTIONAL`. + */ + proto3Optional: boolean; +} + +export enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + TYPE_GROUP = 10, + /** TYPE_MESSAGE - Length-delimited aggregate. */ + TYPE_MESSAGE = 11, + /** TYPE_BYTES - New in version 2. */ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + /** TYPE_SINT64 - Uses ZigZag encoding. 
*/ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + return FieldDescriptorProto_Type.TYPE_SINT64; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Type.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case 
FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + case FieldDescriptorProto_Type.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Label.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string { + switch (object) { + case 
FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + case FieldDescriptorProto_Label.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** Describes a oneof. */ +export interface OneofDescriptorProto { + name: string; + options: OneofOptions | undefined; +} + +/** Describes an enum type. */ +export interface EnumDescriptorProto { + name: string; + value: EnumValueDescriptorProto[]; + options: + | EnumOptions + | undefined; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reservedRange: EnumDescriptorProto_EnumReservedRange[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reservedName: string[]; +} + +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start: number; + /** Inclusive. */ + end: number; +} + +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProto { + name: string; + number: number; + options: EnumValueOptions | undefined; +} + +/** Describes a service. */ +export interface ServiceDescriptorProto { + name: string; + method: MethodDescriptorProto[]; + options: ServiceOptions | undefined; +} + +/** Describes a method of a service. */ +export interface MethodDescriptorProto { + name: string; + /** + * Input and output type names. 
These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + inputType: string; + outputType: string; + options: + | MethodOptions + | undefined; + /** Identifies if client streams multiple client messages */ + clientStreaming: boolean; + /** Identifies if server streams multiple server messages */ + serverStreaming: boolean; +} + +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage: string; + /** + * Controls the name of the wrapper Java class generated for the .proto file. + * That class will always contain the .proto file's getDescriptor() method as + * well as any top-level extensions defined in the .proto file. + * If java_multiple_files is disabled, then all the other classes from the + * .proto file will be nested inside the single wrapper outer class. + */ + javaOuterClassname: string; + /** + * If enabled, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the wrapper class + * named by java_outer_classname. However, the wrapper class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + javaMultipleFiles: boolean; + /** + * This option does nothing. + * + * @deprecated + */ + javaGenerateEqualsAndHash: boolean; + /** + * If set true, then the Java2 code generator will generate code that + * throws an exception whenever an attempt is made to assign a non-UTF-8 + * byte sequence to a string field. + * Message reflection will do the same. + * However, an extension field still accepts non-UTF-8 byte sequences. 
+ * This option has no effect on when used with the lite runtime. + */ + javaStringCheckUtf8: boolean; + optimizeFor: FileOptions_OptimizeMode; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + goPackage: string; + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + ccGenericServices: boolean; + javaGenericServices: boolean; + pyGenericServices: boolean; + phpGenericServices: boolean; + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated: boolean; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + ccEnableArenas: boolean; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. + */ + objcClassPrefix: string; + /** Namespace for generated classes; defaults to the package. 
*/ + csharpNamespace: string; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swiftPrefix: string; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + phpClassPrefix: string; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. When this option is empty, the package name will be used for + * determining the namespace. + */ + phpNamespace: string; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + phpMetadataNamespace: string; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + rubyPackage: string; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Generated classes can be optimized for speed or code size. */ +export enum FileOptions_OptimizeMode { + /** SPEED - Generate complete code for parsing, serialization, */ + SPEED = 1, + /** CODE_SIZE - etc. */ + CODE_SIZE = 2, + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. 
*/ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1, +} + +export function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + case -1: + case "UNRECOGNIZED": + default: + return FileOptions_OptimizeMode.UNRECOGNIZED; + } +} + +export function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + case FileOptions_OptimizeMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + messageSetWireFormat: boolean; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". 
+ */ + noStandardDescriptorAccessor: boolean; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated: boolean; + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + mapEntry: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface FieldOptions { + /** + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is not yet implemented in the open source + * release -- sorry, we'll try to include it in a future version! + */ + ctype: FieldOptions_CType; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. 
+ */ + packed: boolean; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. + * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype: FieldOptions_JSType; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * Note that implementations may choose not to check required fields within + * a lazy sub-message. That is, calling IsInitialized() on the outer message + * may return true even if the inner message has missing required fields. 
+ * This is necessary because otherwise the inner message would have to be + * parsed in order to perform the check, defeating the purpose of lazy + * parsing. An implementation which chooses not to check required fields + * must be consistent about it. That is, for any particular sub-message, the + * implementation must either *always* check its required fields, or *never* + * check its required fields, regardless of whether or not the message has + * been parsed. + */ + lazy: boolean; + /** + * Is this field deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated: boolean; + /** For Google-internal migration only. Do not use. */ + weak: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export enum FieldOptions_CType { + /** STRING - Default mode. */ + STRING = 0, + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_CType.UNRECOGNIZED; + } +} + +export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + case FieldOptions_CType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. */ + JS_NORMAL = 0, + /** JS_STRING - Use JavaScript strings. 
*/ + JS_STRING = 1, + /** JS_NUMBER - Use JavaScript numbers. */ + JS_NUMBER = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_JSType.UNRECOGNIZED; + } +} + +export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + case FieldOptions_JSType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface OneofOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias: boolean; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumValueOptions { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. 
*/ + uninterpretedOption: UninterpretedOption[]; +} + +export interface ServiceOptions { + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated: boolean; + idempotencyLevel: MethodOptions_IdempotencyLevel; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
+ */ +export enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1, +} + +export function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + case -1: + case "UNRECOGNIZED": + default: + return MethodOptions_IdempotencyLevel.UNRECOGNIZED; + } +} + +export function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + case MethodOptions_IdempotencyLevel.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. 
+ */ + identifierValue: string; + positiveIntValue: number; + negativeIntValue: number; + doubleValue: number; + stringValue: Uint8Array; + aggregateValue: string; +} + +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePart { + namePart: string; + isExtension: boolean; +} + +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. 
This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[]; +} + +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition. For + * example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). 
+ */ + path: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. + * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to qux. + * // + * // Another line attached to qux. + * optional double qux = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to qux or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. * / + * /* Block comment attached to + * * grault. 
* / + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leadingComments: string; + trailingComments: string; + leadingDetachedComments: string[]; +} + +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. + */ + annotation: GeneratedCodeInfo_Annotation[]; +} + +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[]; + /** Identifies the filesystem path to the original source .proto. */ + sourceFile: string; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin: number; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified offset. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end: number; +} + +function createBaseFileDescriptorSet(): FileDescriptorSet { + return { file: [] }; +} + +export const FileDescriptorSet = { + encode(message: FileDescriptorSet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.file) { + FileDescriptorProto.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorSet { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.file.push(FileDescriptorProto.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorSet { + return { file: Array.isArray(object?.file) ? object.file.map((e: any) => FileDescriptorProto.fromJSON(e)) : [] }; + }, + + toJSON(message: FileDescriptorSet): unknown { + const obj: any = {}; + if (message.file) { + obj.file = message.file.map((e) => e ? FileDescriptorProto.toJSON(e) : undefined); + } else { + obj.file = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorSet { + const message = createBaseFileDescriptorSet(); + message.file = object.file?.map((e) => FileDescriptorProto.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFileDescriptorProto(): FileDescriptorProto { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + }; +} + +export const FileDescriptorProto = { + encode(message: FileDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.package !== "") { + writer.uint32(18).string(message.package); + } + for (const v of message.dependency) { + writer.uint32(26).string(v!); + } + writer.uint32(82).fork(); + for (const v of message.publicDependency) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(90).fork(); + for (const v of message.weakDependency) { + writer.int32(v); + } + writer.ldelim(); + for (const v of message.messageType) { + DescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of 
message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const v of message.service) { + ServiceDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(58).fork()).ldelim(); + } + if (message.options !== undefined) { + FileOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.sourceCodeInfo !== undefined) { + SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(74).fork()).ldelim(); + } + if (message.syntax !== "") { + writer.uint32(98).string(message.syntax); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFileDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.package = reader.string(); + break; + case 3: + message.dependency.push(reader.string()); + break; + case 10: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.publicDependency.push(reader.int32()); + } + } else { + message.publicDependency.push(reader.int32()); + } + break; + case 11: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.weakDependency.push(reader.int32()); + } + } else { + message.weakDependency.push(reader.int32()); + } + break; + case 4: + message.messageType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.service.push(ServiceDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + 
message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 8: + message.options = FileOptions.decode(reader, reader.uint32()); + break; + case 9: + message.sourceCodeInfo = SourceCodeInfo.decode(reader, reader.uint32()); + break; + case 12: + message.syntax = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + package: isSet(object.package) ? String(object.package) : "", + dependency: Array.isArray(object?.dependency) ? object.dependency.map((e: any) => String(e)) : [], + publicDependency: Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e: any) => Number(e)) + : [], + weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e: any) => Number(e)) : [], + messageType: Array.isArray(object?.messageType) + ? object.messageType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + service: Array.isArray(object?.service) ? object.service.map((e: any) => ServiceDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) ? SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, + syntax: isSet(object.syntax) ? 
String(object.syntax) : "", + }; + }, + + toJSON(message: FileDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.package !== undefined && (obj.package = message.package); + if (message.dependency) { + obj.dependency = message.dependency.map((e) => e); + } else { + obj.dependency = []; + } + if (message.publicDependency) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } else { + obj.publicDependency = []; + } + if (message.weakDependency) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } else { + obj.weakDependency = []; + } + if (message.messageType) { + obj.messageType = message.messageType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.messageType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.service) { + obj.service = message.service.map((e) => e ? ServiceDescriptorProto.toJSON(e) : undefined); + } else { + obj.service = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + message.options !== undefined && (obj.options = message.options ? FileOptions.toJSON(message.options) : undefined); + message.sourceCodeInfo !== undefined + && (obj.sourceCodeInfo = message.sourceCodeInfo ? SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined); + message.syntax !== undefined && (obj.syntax = message.syntax); + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorProto { + const message = createBaseFileDescriptorProto(); + message.name = object.name ?? ""; + message.package = object.package ?? 
""; + message.dependency = object.dependency?.map((e) => e) || []; + message.publicDependency = object.publicDependency?.map((e) => e) || []; + message.weakDependency = object.weakDependency?.map((e) => e) || []; + message.messageType = object.messageType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.service = object.service?.map((e) => ServiceDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? FileOptions.fromPartial(object.options) + : undefined; + message.sourceCodeInfo = (object.sourceCodeInfo !== undefined && object.sourceCodeInfo !== null) + ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) + : undefined; + message.syntax = object.syntax ?? ""; + return message; + }, +}; + +function createBaseDescriptorProto(): DescriptorProto { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + }; +} + +export const DescriptorProto = { + encode(message: DescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.field) { + FieldDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.nestedType) { + DescriptorProto.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.extensionRange) { + DescriptorProto_ExtensionRange.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const 
v of message.oneofDecl) { + OneofDescriptorProto.encode(v!, writer.uint32(66).fork()).ldelim(); + } + if (message.options !== undefined) { + MessageOptions.encode(message.options, writer.uint32(58).fork()).ldelim(); + } + for (const v of message.reservedRange) { + DescriptorProto_ReservedRange.encode(v!, writer.uint32(74).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(82).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.field.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.nestedType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 4: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.extensionRange.push(DescriptorProto_ExtensionRange.decode(reader, reader.uint32())); + break; + case 8: + message.oneofDecl.push(OneofDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + message.options = MessageOptions.decode(reader, reader.uint32()); + break; + case 9: + message.reservedRange.push(DescriptorProto_ReservedRange.decode(reader, reader.uint32())); + break; + case 10: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + field: Array.isArray(object?.field) ? 
object.field.map((e: any) => FieldDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: Array.isArray(object?.nestedType) + ? object.nestedType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + extensionRange: Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e: any) => DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e: any) => OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? MessageOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: DescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.field) { + obj.field = message.field.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.field = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + if (message.nestedType) { + obj.nestedType = message.nestedType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.nestedType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.extensionRange) { + obj.extensionRange = message.extensionRange.map((e) => e ? 
DescriptorProto_ExtensionRange.toJSON(e) : undefined); + } else { + obj.extensionRange = []; + } + if (message.oneofDecl) { + obj.oneofDecl = message.oneofDecl.map((e) => e ? OneofDescriptorProto.toJSON(e) : undefined); + } else { + obj.oneofDecl = []; + } + message.options !== undefined + && (obj.options = message.options ? MessageOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => e ? DescriptorProto_ReservedRange.toJSON(e) : undefined); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): DescriptorProto { + const message = createBaseDescriptorProto(); + message.name = object.name ?? ""; + message.field = object.field?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.nestedType = object.nestedType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.extensionRange = object.extensionRange?.map((e) => DescriptorProto_ExtensionRange.fromPartial(e)) || []; + message.oneofDecl = object.oneofDecl?.map((e) => OneofDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
MessageOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => DescriptorProto_ReservedRange.fromPartial(e)) || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseDescriptorProto_ExtensionRange(): DescriptorProto_ExtensionRange { + return { start: 0, end: 0, options: undefined }; +} + +export const DescriptorProto_ExtensionRange = { + encode(message: DescriptorProto_ExtensionRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + if (message.options !== undefined) { + ExtensionRangeOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ExtensionRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ExtensionRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + case 3: + message.options = ExtensionRangeOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ExtensionRange { + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + options: isSet(object.options) ? 
ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: DescriptorProto_ExtensionRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + message.options !== undefined + && (obj.options = message.options ? ExtensionRangeOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ExtensionRange { + const message = createBaseDescriptorProto_ExtensionRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? ExtensionRangeOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseDescriptorProto_ReservedRange(): DescriptorProto_ReservedRange { + return { start: 0, end: 0 }; +} + +export const DescriptorProto_ReservedRange = { + encode(message: DescriptorProto_ReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? 
Number(object.end) : 0 }; + }, + + toJSON(message: DescriptorProto_ReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ReservedRange { + const message = createBaseDescriptorProto_ReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + return message; + }, +}; + +function createBaseExtensionRangeOptions(): ExtensionRangeOptions { + return { uninterpretedOption: [] }; +} + +export const ExtensionRangeOptions = { + encode(message: ExtensionRangeOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ExtensionRangeOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ExtensionRangeOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} + +export const FieldDescriptorProto = { + encode(message: FieldDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(24).int32(message.number); + } + if (message.label !== 1) { + writer.uint32(32).int32(message.label); + } + if (message.type !== 1) { + writer.uint32(40).int32(message.type); + } + if (message.typeName !== "") { + writer.uint32(50).string(message.typeName); + } + if (message.extendee !== "") { + writer.uint32(18).string(message.extendee); + } + if (message.defaultValue !== "") { + writer.uint32(58).string(message.defaultValue); + } + if (message.oneofIndex !== 0) { + writer.uint32(72).int32(message.oneofIndex); + } + if (message.jsonName !== "") { + writer.uint32(82).string(message.jsonName); + } + if (message.options !== undefined) { + FieldOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.proto3Optional === true) { + writer.uint32(136).bool(message.proto3Optional); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 3: + message.number = reader.int32(); + break; + case 4: + message.label = reader.int32() as any; + break; + case 5: + message.type = reader.int32() as any; + break; + case 6: + message.typeName = reader.string(); + break; + case 2: + message.extendee = reader.string(); + break; + case 7: + message.defaultValue = reader.string(); + break; + case 9: + message.oneofIndex = reader.int32(); + break; + case 10: + message.jsonName = reader.string(); + break; + case 8: + message.options = FieldOptions.decode(reader, reader.uint32()); + break; + case 17: + message.proto3Optional = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) ? String(object.typeName) : "", + extendee: isSet(object.extendee) ? String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "", + oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? String(object.jsonName) : "", + options: isSet(object.options) ? FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) ? 
Boolean(object.proto3Optional) : false, + }; + }, + + toJSON(message: FieldDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label)); + message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type)); + message.typeName !== undefined && (obj.typeName = message.typeName); + message.extendee !== undefined && (obj.extendee = message.extendee); + message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue); + message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex)); + message.jsonName !== undefined && (obj.jsonName = message.jsonName); + message.options !== undefined && (obj.options = message.options ? FieldOptions.toJSON(message.options) : undefined); + message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional); + return obj; + }, + + fromPartial, I>>(object: I): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.label = object.label ?? 1; + message.type = object.type ?? 1; + message.typeName = object.typeName ?? ""; + message.extendee = object.extendee ?? ""; + message.defaultValue = object.defaultValue ?? ""; + message.oneofIndex = object.oneofIndex ?? 0; + message.jsonName = object.jsonName ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? FieldOptions.fromPartial(object.options) + : undefined; + message.proto3Optional = object.proto3Optional ?? 
false; + return message; + }, +}; + +function createBaseOneofDescriptorProto(): OneofDescriptorProto { + return { name: "", options: undefined }; +} + +export const OneofDescriptorProto = { + encode(message: OneofDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.options !== undefined) { + OneofOptions.encode(message.options, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.options = OneofOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + options: isSet(object.options) ? OneofOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: OneofDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.options !== undefined && (obj.options = message.options ? OneofOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto(); + message.name = object.name ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? 
OneofOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseEnumDescriptorProto(): EnumDescriptorProto { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] }; +} + +export const EnumDescriptorProto = { + encode(message: EnumDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.value) { + EnumValueDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + EnumOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.reservedRange) { + EnumDescriptorProto_EnumReservedRange.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(42).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.value.push(EnumValueDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = EnumOptions.decode(reader, reader.uint32()); + break; + case 4: + message.reservedRange.push(EnumDescriptorProto_EnumReservedRange.decode(reader, reader.uint32())); + break; + case 5: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + value: Array.isArray(object?.value) ? 
object.value.map((e: any) => EnumValueDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? EnumOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: EnumDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.value) { + obj.value = message.value.map((e) => e ? EnumValueDescriptorProto.toJSON(e) : undefined); + } else { + obj.value = []; + } + message.options !== undefined && (obj.options = message.options ? EnumOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => + e ? EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined + ); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto(); + message.name = object.name ?? ""; + message.value = object.value?.map((e) => EnumValueDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
EnumOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => EnumDescriptorProto_EnumReservedRange.fromPartial(e)) + || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseEnumDescriptorProto_EnumReservedRange(): EnumDescriptorProto_EnumReservedRange { + return { start: 0, end: 0 }; +} + +export const EnumDescriptorProto_EnumReservedRange = { + encode(message: EnumDescriptorProto_EnumReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto_EnumReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto_EnumReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + + toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +function createBaseEnumValueDescriptorProto(): EnumValueDescriptorProto { + return { name: "", number: 0, options: undefined }; +} + +export const EnumValueDescriptorProto = { + encode(message: EnumValueDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(16).int32(message.number); + } + if (message.options !== undefined) { + EnumValueOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.number = reader.int32(); + break; + case 3: + message.options = EnumValueOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + options: isSet(object.options) ? EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: EnumValueDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.options !== undefined + && (obj.options = message.options ? 
EnumValueOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? EnumValueOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseServiceDescriptorProto(): ServiceDescriptorProto { + return { name: "", method: [], options: undefined }; +} + +export const ServiceDescriptorProto = { + encode(message: ServiceDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.method) { + MethodDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + ServiceOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.method.push(MethodDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = ServiceOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + method: Array.isArray(object?.method) ? object.method.map((e: any) => MethodDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? 
ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: ServiceDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.method) { + obj.method = message.method.map((e) => e ? MethodDescriptorProto.toJSON(e) : undefined); + } else { + obj.method = []; + } + message.options !== undefined + && (obj.options = message.options ? ServiceOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto(); + message.name = object.name ?? ""; + message.method = object.method?.map((e) => MethodDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? ServiceOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseMethodDescriptorProto(): MethodDescriptorProto { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} + +export const MethodDescriptorProto = { + encode(message: MethodDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.inputType !== "") { + writer.uint32(18).string(message.inputType); + } + if (message.outputType !== "") { + writer.uint32(26).string(message.outputType); + } + if (message.options !== undefined) { + MethodOptions.encode(message.options, writer.uint32(34).fork()).ldelim(); + } + if (message.clientStreaming === true) { + writer.uint32(40).bool(message.clientStreaming); + } + if (message.serverStreaming === true) { + writer.uint32(48).bool(message.serverStreaming); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodDescriptorProto { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.inputType = reader.string(); + break; + case 3: + message.outputType = reader.string(); + break; + case 4: + message.options = MethodOptions.decode(reader, reader.uint32()); + break; + case 5: + message.clientStreaming = reader.bool(); + break; + case 6: + message.serverStreaming = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + inputType: isSet(object.inputType) ? String(object.inputType) : "", + outputType: isSet(object.outputType) ? String(object.outputType) : "", + options: isSet(object.options) ? MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false, + serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false, + }; + }, + + toJSON(message: MethodDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.inputType !== undefined && (obj.inputType = message.inputType); + message.outputType !== undefined && (obj.outputType = message.outputType); + message.options !== undefined + && (obj.options = message.options ? MethodOptions.toJSON(message.options) : undefined); + message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming); + message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming); + return obj; + }, + + fromPartial, I>>(object: I): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto(); + message.name = object.name ?? 
""; + message.inputType = object.inputType ?? ""; + message.outputType = object.outputType ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? MethodOptions.fromPartial(object.options) + : undefined; + message.clientStreaming = object.clientStreaming ?? false; + message.serverStreaming = object.serverStreaming ?? false; + return message; + }, +}; + +function createBaseFileOptions(): FileOptions { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + uninterpretedOption: [], + }; +} + +export const FileOptions = { + encode(message: FileOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.javaPackage !== "") { + writer.uint32(10).string(message.javaPackage); + } + if (message.javaOuterClassname !== "") { + writer.uint32(66).string(message.javaOuterClassname); + } + if (message.javaMultipleFiles === true) { + writer.uint32(80).bool(message.javaMultipleFiles); + } + if (message.javaGenerateEqualsAndHash === true) { + writer.uint32(160).bool(message.javaGenerateEqualsAndHash); + } + if (message.javaStringCheckUtf8 === true) { + writer.uint32(216).bool(message.javaStringCheckUtf8); + } + if (message.optimizeFor !== 1) { + writer.uint32(72).int32(message.optimizeFor); + } + if (message.goPackage !== "") { + writer.uint32(90).string(message.goPackage); + } + if (message.ccGenericServices === true) { + writer.uint32(128).bool(message.ccGenericServices); + } + if (message.javaGenericServices === true) { + writer.uint32(136).bool(message.javaGenericServices); + } + if 
(message.pyGenericServices === true) { + writer.uint32(144).bool(message.pyGenericServices); + } + if (message.phpGenericServices === true) { + writer.uint32(336).bool(message.phpGenericServices); + } + if (message.deprecated === true) { + writer.uint32(184).bool(message.deprecated); + } + if (message.ccEnableArenas === true) { + writer.uint32(248).bool(message.ccEnableArenas); + } + if (message.objcClassPrefix !== "") { + writer.uint32(290).string(message.objcClassPrefix); + } + if (message.csharpNamespace !== "") { + writer.uint32(298).string(message.csharpNamespace); + } + if (message.swiftPrefix !== "") { + writer.uint32(314).string(message.swiftPrefix); + } + if (message.phpClassPrefix !== "") { + writer.uint32(322).string(message.phpClassPrefix); + } + if (message.phpNamespace !== "") { + writer.uint32(330).string(message.phpNamespace); + } + if (message.phpMetadataNamespace !== "") { + writer.uint32(354).string(message.phpMetadataNamespace); + } + if (message.rubyPackage !== "") { + writer.uint32(362).string(message.rubyPackage); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.javaPackage = reader.string(); + break; + case 8: + message.javaOuterClassname = reader.string(); + break; + case 10: + message.javaMultipleFiles = reader.bool(); + break; + case 20: + message.javaGenerateEqualsAndHash = reader.bool(); + break; + case 27: + message.javaStringCheckUtf8 = reader.bool(); + break; + case 9: + message.optimizeFor = reader.int32() as any; + break; + case 11: + message.goPackage = reader.string(); + break; + case 16: + message.ccGenericServices = reader.bool(); + break; + case 17: + message.javaGenericServices = reader.bool(); + break; + case 18: + message.pyGenericServices = reader.bool(); + break; + case 42: + message.phpGenericServices = reader.bool(); + break; + case 23: + message.deprecated = reader.bool(); + break; + case 31: + message.ccEnableArenas = reader.bool(); + break; + case 36: + message.objcClassPrefix = reader.string(); + break; + case 37: + message.csharpNamespace = reader.string(); + break; + case 39: + message.swiftPrefix = reader.string(); + break; + case 40: + message.phpClassPrefix = reader.string(); + break; + case 41: + message.phpNamespace = reader.string(); + break; + case 44: + message.phpMetadataNamespace = reader.string(); + break; + case 45: + message.rubyPackage = reader.string(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileOptions { + return { + javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "", + javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) ? 
Boolean(object.javaMultipleFiles) : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false, + optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, + goPackage: isSet(object.goPackage) ? String(object.goPackage) : "", + ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false, + javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false, + pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false, + phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false, + objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "", + csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "", + swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "", + phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "", + phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "", + rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "", + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FileOptions): unknown { + const obj: any = {}; + message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage); + message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname); + message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles); + message.javaGenerateEqualsAndHash !== undefined + && (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash); + message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8); + message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor)); + message.goPackage !== undefined && (obj.goPackage = message.goPackage); + message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices); + message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices); + message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices); + message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas); + message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix); + message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace); + message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix); + message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix); + message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace); + message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace); + message.rubyPackage !== undefined 
&& (obj.rubyPackage = message.rubyPackage); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileOptions { + const message = createBaseFileOptions(); + message.javaPackage = object.javaPackage ?? ""; + message.javaOuterClassname = object.javaOuterClassname ?? ""; + message.javaMultipleFiles = object.javaMultipleFiles ?? false; + message.javaGenerateEqualsAndHash = object.javaGenerateEqualsAndHash ?? false; + message.javaStringCheckUtf8 = object.javaStringCheckUtf8 ?? false; + message.optimizeFor = object.optimizeFor ?? 1; + message.goPackage = object.goPackage ?? ""; + message.ccGenericServices = object.ccGenericServices ?? false; + message.javaGenericServices = object.javaGenericServices ?? false; + message.pyGenericServices = object.pyGenericServices ?? false; + message.phpGenericServices = object.phpGenericServices ?? false; + message.deprecated = object.deprecated ?? false; + message.ccEnableArenas = object.ccEnableArenas ?? false; + message.objcClassPrefix = object.objcClassPrefix ?? ""; + message.csharpNamespace = object.csharpNamespace ?? ""; + message.swiftPrefix = object.swiftPrefix ?? ""; + message.phpClassPrefix = object.phpClassPrefix ?? ""; + message.phpNamespace = object.phpNamespace ?? ""; + message.phpMetadataNamespace = object.phpMetadataNamespace ?? ""; + message.rubyPackage = object.rubyPackage ?? 
""; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMessageOptions(): MessageOptions { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + uninterpretedOption: [], + }; +} + +export const MessageOptions = { + encode(message: MessageOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.messageSetWireFormat === true) { + writer.uint32(8).bool(message.messageSetWireFormat); + } + if (message.noStandardDescriptorAccessor === true) { + writer.uint32(16).bool(message.noStandardDescriptorAccessor); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.mapEntry === true) { + writer.uint32(56).bool(message.mapEntry); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MessageOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMessageOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.messageSetWireFormat = reader.bool(); + break; + case 2: + message.noStandardDescriptorAccessor = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 7: + message.mapEntry = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MessageOptions { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) ? 
Boolean(object.messageSetWireFormat) : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MessageOptions): unknown { + const obj: any = {}; + message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat); + message.noStandardDescriptorAccessor !== undefined + && (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MessageOptions { + const message = createBaseMessageOptions(); + message.messageSetWireFormat = object.messageSetWireFormat ?? false; + message.noStandardDescriptorAccessor = object.noStandardDescriptorAccessor ?? false; + message.deprecated = object.deprecated ?? false; + message.mapEntry = object.mapEntry ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldOptions(): FieldOptions { + return { ctype: 0, packed: false, jstype: 0, lazy: false, deprecated: false, weak: false, uninterpretedOption: [] }; +} + +export const FieldOptions = { + encode(message: FieldOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ctype !== 0) { + writer.uint32(8).int32(message.ctype); + } + if (message.packed === true) { + writer.uint32(16).bool(message.packed); + } + if (message.jstype !== 0) { + writer.uint32(48).int32(message.jstype); + } + if (message.lazy === true) { + writer.uint32(40).bool(message.lazy); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.weak === true) { + writer.uint32(80).bool(message.weak); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.ctype = reader.int32() as any; + break; + case 2: + message.packed = reader.bool(); + break; + case 6: + message.jstype = reader.int32() as any; + break; + case 5: + message.lazy = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 10: + message.weak = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldOptions { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? Boolean(object.lazy) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? Boolean(object.weak) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FieldOptions): unknown { + const obj: any = {}; + message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype)); + message.packed !== undefined && (obj.packed = message.packed); + message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype)); + message.lazy !== undefined && (obj.lazy = message.lazy); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.weak !== undefined && (obj.weak = message.weak); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FieldOptions { + const message = createBaseFieldOptions(); + message.ctype = object.ctype ?? 0; + message.packed = object.packed ?? false; + message.jstype = object.jstype ?? 0; + message.lazy = object.lazy ?? false; + message.deprecated = object.deprecated ?? false; + message.weak = object.weak ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseOneofOptions(): OneofOptions { + return { uninterpretedOption: [] }; +} + +export const OneofOptions = { + encode(message: OneofOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: OneofOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): OneofOptions { + const message = createBaseOneofOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumOptions(): EnumOptions { + return { allowAlias: false, deprecated: false, uninterpretedOption: [] }; +} + +export const EnumOptions = { + encode(message: EnumOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.allowAlias === true) { + writer.uint32(16).bool(message.allowAlias); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.allowAlias = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumOptions { + return { + allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumOptions): unknown { + const obj: any = {}; + message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumOptions { + const message = createBaseEnumOptions(); + message.allowAlias = object.allowAlias ?? false; + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumValueOptions(): EnumValueOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const EnumValueOptions = { + encode(message: EnumValueOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(8).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumValueOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumValueOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumValueOptions { + const message = createBaseEnumValueOptions(); + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseServiceOptions(): ServiceOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const ServiceOptions = { + encode(message: ServiceOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceOptions { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ServiceOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ServiceOptions { + const message = createBaseServiceOptions(); + message.deprecated = object.deprecated ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMethodOptions(): MethodOptions { + return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] }; +} + +export const MethodOptions = { + encode(message: MethodOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + if (message.idempotencyLevel !== 0) { + writer.uint32(272).int32(message.idempotencyLevel); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 34: + message.idempotencyLevel = reader.int32() as any; + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : 0, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MethodOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.idempotencyLevel !== undefined + && (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel)); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MethodOptions { + const message = createBaseMethodOptions(); + message.deprecated = object.deprecated ?? false; + message.idempotencyLevel = object.idempotencyLevel ?? 0; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseUninterpretedOption(): UninterpretedOption { + return { + name: [], + identifierValue: "", + positiveIntValue: 0, + negativeIntValue: 0, + doubleValue: 0, + stringValue: new Uint8Array(), + aggregateValue: "", + }; +} + +export const UninterpretedOption = { + encode(message: UninterpretedOption, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.name) { + UninterpretedOption_NamePart.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.identifierValue !== "") { + writer.uint32(26).string(message.identifierValue); + } + if (message.positiveIntValue !== 0) { + writer.uint32(32).uint64(message.positiveIntValue); + } + if (message.negativeIntValue !== 0) { + writer.uint32(40).int64(message.negativeIntValue); + } + if (message.doubleValue !== 0) { + writer.uint32(49).double(message.doubleValue); + } + if (message.stringValue.length !== 0) { + writer.uint32(58).bytes(message.stringValue); + } + if (message.aggregateValue !== "") { + writer.uint32(66).string(message.aggregateValue); + } + 
return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.name.push(UninterpretedOption_NamePart.decode(reader, reader.uint32())); + break; + case 3: + message.identifierValue = reader.string(); + break; + case 4: + message.positiveIntValue = longToNumber(reader.uint64() as Long); + break; + case 5: + message.negativeIntValue = longToNumber(reader.int64() as Long); + break; + case 6: + message.doubleValue = reader.double(); + break; + case 7: + message.stringValue = reader.bytes(); + break; + case 8: + message.aggregateValue = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption { + return { + name: Array.isArray(object?.name) ? object.name.map((e: any) => UninterpretedOption_NamePart.fromJSON(e)) : [], + identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "", + positiveIntValue: isSet(object.positiveIntValue) ? Number(object.positiveIntValue) : 0, + negativeIntValue: isSet(object.negativeIntValue) ? Number(object.negativeIntValue) : 0, + doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) ? bytesFromBase64(object.stringValue) : new Uint8Array(), + aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "", + }; + }, + + toJSON(message: UninterpretedOption): unknown { + const obj: any = {}; + if (message.name) { + obj.name = message.name.map((e) => e ? 
UninterpretedOption_NamePart.toJSON(e) : undefined); + } else { + obj.name = []; + } + message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue); + message.positiveIntValue !== undefined && (obj.positiveIntValue = Math.round(message.positiveIntValue)); + message.negativeIntValue !== undefined && (obj.negativeIntValue = Math.round(message.negativeIntValue)); + message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue); + message.stringValue !== undefined + && (obj.stringValue = base64FromBytes( + message.stringValue !== undefined ? message.stringValue : new Uint8Array(), + )); + message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption { + const message = createBaseUninterpretedOption(); + message.name = object.name?.map((e) => UninterpretedOption_NamePart.fromPartial(e)) || []; + message.identifierValue = object.identifierValue ?? ""; + message.positiveIntValue = object.positiveIntValue ?? 0; + message.negativeIntValue = object.negativeIntValue ?? 0; + message.doubleValue = object.doubleValue ?? 0; + message.stringValue = object.stringValue ?? new Uint8Array(); + message.aggregateValue = object.aggregateValue ?? ""; + return message; + }, +}; + +function createBaseUninterpretedOption_NamePart(): UninterpretedOption_NamePart { + return { namePart: "", isExtension: false }; +} + +export const UninterpretedOption_NamePart = { + encode(message: UninterpretedOption_NamePart, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.namePart !== "") { + writer.uint32(10).string(message.namePart); + } + if (message.isExtension === true) { + writer.uint32(16).bool(message.isExtension); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption_NamePart { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseUninterpretedOption_NamePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.namePart = reader.string(); + break; + case 2: + message.isExtension = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption_NamePart { + return { + namePart: isSet(object.namePart) ? String(object.namePart) : "", + isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false, + }; + }, + + toJSON(message: UninterpretedOption_NamePart): unknown { + const obj: any = {}; + message.namePart !== undefined && (obj.namePart = message.namePart); + message.isExtension !== undefined && (obj.isExtension = message.isExtension); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart(); + message.namePart = object.namePart ?? ""; + message.isExtension = object.isExtension ?? false; + return message; + }, +}; + +function createBaseSourceCodeInfo(): SourceCodeInfo { + return { location: [] }; +} + +export const SourceCodeInfo = { + encode(message: SourceCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.location) { + SourceCodeInfo_Location.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.location.push(SourceCodeInfo_Location.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo { + return { + location: Array.isArray(object?.location) + ? object.location.map((e: any) => SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo): unknown { + const obj: any = {}; + if (message.location) { + obj.location = message.location.map((e) => e ? SourceCodeInfo_Location.toJSON(e) : undefined); + } else { + obj.location = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo { + const message = createBaseSourceCodeInfo(); + message.location = object.location?.map((e) => SourceCodeInfo_Location.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSourceCodeInfo_Location(): SourceCodeInfo_Location { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} + +export const SourceCodeInfo_Location = { + encode(message: SourceCodeInfo_Location, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(18).fork(); + for (const v of message.span) { + writer.int32(v); + } + writer.ldelim(); + if (message.leadingComments !== "") { + writer.uint32(26).string(message.leadingComments); + } + if (message.trailingComments !== "") { + writer.uint32(34).string(message.trailingComments); + } + for (const v of message.leadingDetachedComments) { + writer.uint32(50).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo_Location { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo_Location(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.span.push(reader.int32()); + } + } else { + message.span.push(reader.int32()); + } + break; + case 3: + message.leadingComments = reader.string(); + break; + case 4: + message.trailingComments = reader.string(); + break; + case 6: + message.leadingDetachedComments.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo_Location { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + span: Array.isArray(object?.span) ? object.span.map((e: any) => Number(e)) : [], + leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "", + trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "", + leadingDetachedComments: Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e: any) => String(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo_Location): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + if (message.span) { + obj.span = message.span.map((e) => Math.round(e)); + } else { + obj.span = []; + } + message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments); + message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments); + if (message.leadingDetachedComments) { + obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e); + } else { + obj.leadingDetachedComments = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location(); + message.path = object.path?.map((e) => e) || []; + message.span = object.span?.map((e) => e) || []; + message.leadingComments = object.leadingComments ?? ""; + message.trailingComments = object.trailingComments ?? ""; + message.leadingDetachedComments = object.leadingDetachedComments?.map((e) => e) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo(): GeneratedCodeInfo { + return { annotation: [] }; +} + +export const GeneratedCodeInfo = { + encode(message: GeneratedCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.annotation) { + GeneratedCodeInfo_Annotation.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.annotation.push(GeneratedCodeInfo_Annotation.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo { + return { + annotation: Array.isArray(object?.annotation) + ? object.annotation.map((e: any) => GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GeneratedCodeInfo): unknown { + const obj: any = {}; + if (message.annotation) { + obj.annotation = message.annotation.map((e) => e ? GeneratedCodeInfo_Annotation.toJSON(e) : undefined); + } else { + obj.annotation = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo(); + message.annotation = object.annotation?.map((e) => GeneratedCodeInfo_Annotation.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo_Annotation(): GeneratedCodeInfo_Annotation { + return { path: [], sourceFile: "", begin: 0, end: 0 }; +} + +export const GeneratedCodeInfo_Annotation = { + encode(message: GeneratedCodeInfo_Annotation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + if (message.sourceFile !== "") { + writer.uint32(18).string(message.sourceFile); + } + if (message.begin !== 0) { + writer.uint32(24).int32(message.begin); + } + if (message.end !== 0) { + writer.uint32(32).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo_Annotation { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo_Annotation(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + message.sourceFile = reader.string(); + break; + case 3: + message.begin = reader.int32(); + break; + case 4: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo_Annotation { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "", + begin: isSet(object.begin) ? Number(object.begin) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + }; + }, + + toJSON(message: GeneratedCodeInfo_Annotation): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile); + message.begin !== undefined && (obj.begin = Math.round(message.begin)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation(); + message.path = object.path?.map((e) => e) || []; + message.sourceFile = object.sourceFile ?? ""; + message.begin = object.begin ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/certified_model.ts b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/certified_model.ts new file mode 100644 index 000000000..98488695e --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/certified_model.ts @@ -0,0 +1,112 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.compliance"; + +export interface CertifiedModel { + vid: number; + pid: number; + softwareVersion: number; + certificationType: string; + value: boolean; +} + +function createBaseCertifiedModel(): CertifiedModel { + return { vid: 0, pid: 0, softwareVersion: 0, certificationType: "", value: false }; +} + +export const CertifiedModel = { + encode(message: CertifiedModel, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(16).int32(message.pid); + } + if (message.softwareVersion !== 0) { + writer.uint32(24).uint32(message.softwareVersion); + } + if (message.certificationType !== "") { + writer.uint32(34).string(message.certificationType); + } + if (message.value === true) { + writer.uint32(40).bool(message.value); + } + return writer; + }, + + decode(input: 
_m0.Reader | Uint8Array, length?: number): CertifiedModel { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCertifiedModel(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + case 2: + message.pid = reader.int32(); + break; + case 3: + message.softwareVersion = reader.uint32(); + break; + case 4: + message.certificationType = reader.string(); + break; + case 5: + message.value = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): CertifiedModel { + return { + vid: isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? Number(object.pid) : 0, + softwareVersion: isSet(object.softwareVersion) ? Number(object.softwareVersion) : 0, + certificationType: isSet(object.certificationType) ? String(object.certificationType) : "", + value: isSet(object.value) ? Boolean(object.value) : false, + }; + }, + + toJSON(message: CertifiedModel): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.softwareVersion !== undefined && (obj.softwareVersion = Math.round(message.softwareVersion)); + message.certificationType !== undefined && (obj.certificationType = message.certificationType); + message.value !== undefined && (obj.value = message.value); + return obj; + }, + + fromPartial, I>>(object: I): CertifiedModel { + const message = createBaseCertifiedModel(); + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + message.softwareVersion = object.softwareVersion ?? 0; + message.certificationType = object.certificationType ?? ""; + message.value = object.value ?? 
false; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/compliance_history_item.ts b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/compliance_history_item.ts new file mode 100644 index 000000000..900947340 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/compliance_history_item.ts @@ -0,0 +1,105 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.compliance"; + +export interface ComplianceHistoryItem { + softwareVersionCertificationStatus: number; + date: string; + reason: string; + cDVersionNumber: number; +} + +function createBaseComplianceHistoryItem(): ComplianceHistoryItem { + return { softwareVersionCertificationStatus: 0, date: "", reason: "", cDVersionNumber: 0 }; +} + +export const ComplianceHistoryItem = { + encode(message: ComplianceHistoryItem, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.softwareVersionCertificationStatus !== 0) { + writer.uint32(8).uint32(message.softwareVersionCertificationStatus); + } + if (message.date !== "") { + writer.uint32(18).string(message.date); + } + if (message.reason !== "") { + 
writer.uint32(26).string(message.reason); + } + if (message.cDVersionNumber !== 0) { + writer.uint32(32).uint32(message.cDVersionNumber); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ComplianceHistoryItem { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseComplianceHistoryItem(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.softwareVersionCertificationStatus = reader.uint32(); + break; + case 2: + message.date = reader.string(); + break; + case 3: + message.reason = reader.string(); + break; + case 4: + message.cDVersionNumber = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ComplianceHistoryItem { + return { + softwareVersionCertificationStatus: isSet(object.softwareVersionCertificationStatus) + ? Number(object.softwareVersionCertificationStatus) + : 0, + date: isSet(object.date) ? String(object.date) : "", + reason: isSet(object.reason) ? String(object.reason) : "", + cDVersionNumber: isSet(object.cDVersionNumber) ? Number(object.cDVersionNumber) : 0, + }; + }, + + toJSON(message: ComplianceHistoryItem): unknown { + const obj: any = {}; + message.softwareVersionCertificationStatus !== undefined + && (obj.softwareVersionCertificationStatus = Math.round(message.softwareVersionCertificationStatus)); + message.date !== undefined && (obj.date = message.date); + message.reason !== undefined && (obj.reason = message.reason); + message.cDVersionNumber !== undefined && (obj.cDVersionNumber = Math.round(message.cDVersionNumber)); + return obj; + }, + + fromPartial, I>>(object: I): ComplianceHistoryItem { + const message = createBaseComplianceHistoryItem(); + message.softwareVersionCertificationStatus = object.softwareVersionCertificationStatus ?? 
0; + message.date = object.date ?? ""; + message.reason = object.reason ?? ""; + message.cDVersionNumber = object.cDVersionNumber ?? 0; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/compliance_info.ts b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/compliance_info.ts new file mode 100644 index 000000000..08e56899f --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/compliance_info.ts @@ -0,0 +1,338 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { ComplianceHistoryItem } from "./compliance_history_item"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.compliance"; + +export interface ComplianceInfo { + vid: number; + pid: number; + softwareVersion: number; + certificationType: string; + softwareVersionString: string; + cDVersionNumber: number; + softwareVersionCertificationStatus: number; + date: string; + reason: string; + owner: string; + history: ComplianceHistoryItem[]; + cDCertificateId: string; + certificationRoute: string; + programType: string; + programTypeVersion: string; + compliantPlatformUsed: string; + compliantPlatformVersion: string; + transport: string; + familyId: string; + supportedClusters: 
string; + OSVersion: string; + parentChild: string; + certificationIdOfSoftwareComponent: string; + schemaVersion: number; +} + +function createBaseComplianceInfo(): ComplianceInfo { + return { + vid: 0, + pid: 0, + softwareVersion: 0, + certificationType: "", + softwareVersionString: "", + cDVersionNumber: 0, + softwareVersionCertificationStatus: 0, + date: "", + reason: "", + owner: "", + history: [], + cDCertificateId: "", + certificationRoute: "", + programType: "", + programTypeVersion: "", + compliantPlatformUsed: "", + compliantPlatformVersion: "", + transport: "", + familyId: "", + supportedClusters: "", + OSVersion: "", + parentChild: "", + certificationIdOfSoftwareComponent: "", + schemaVersion: 0, + }; +} + +export const ComplianceInfo = { + encode(message: ComplianceInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(16).int32(message.pid); + } + if (message.softwareVersion !== 0) { + writer.uint32(24).uint32(message.softwareVersion); + } + if (message.certificationType !== "") { + writer.uint32(34).string(message.certificationType); + } + if (message.softwareVersionString !== "") { + writer.uint32(42).string(message.softwareVersionString); + } + if (message.cDVersionNumber !== 0) { + writer.uint32(48).uint32(message.cDVersionNumber); + } + if (message.softwareVersionCertificationStatus !== 0) { + writer.uint32(56).uint32(message.softwareVersionCertificationStatus); + } + if (message.date !== "") { + writer.uint32(66).string(message.date); + } + if (message.reason !== "") { + writer.uint32(74).string(message.reason); + } + if (message.owner !== "") { + writer.uint32(82).string(message.owner); + } + for (const v of message.history) { + ComplianceHistoryItem.encode(v!, writer.uint32(90).fork()).ldelim(); + } + if (message.cDCertificateId !== "") { + writer.uint32(98).string(message.cDCertificateId); + } + if 
(message.certificationRoute !== "") { + writer.uint32(106).string(message.certificationRoute); + } + if (message.programType !== "") { + writer.uint32(114).string(message.programType); + } + if (message.programTypeVersion !== "") { + writer.uint32(122).string(message.programTypeVersion); + } + if (message.compliantPlatformUsed !== "") { + writer.uint32(130).string(message.compliantPlatformUsed); + } + if (message.compliantPlatformVersion !== "") { + writer.uint32(138).string(message.compliantPlatformVersion); + } + if (message.transport !== "") { + writer.uint32(146).string(message.transport); + } + if (message.familyId !== "") { + writer.uint32(154).string(message.familyId); + } + if (message.supportedClusters !== "") { + writer.uint32(162).string(message.supportedClusters); + } + if (message.OSVersion !== "") { + writer.uint32(170).string(message.OSVersion); + } + if (message.parentChild !== "") { + writer.uint32(178).string(message.parentChild); + } + if (message.certificationIdOfSoftwareComponent !== "") { + writer.uint32(186).string(message.certificationIdOfSoftwareComponent); + } + if (message.schemaVersion !== 0) { + writer.uint32(192).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ComplianceInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseComplianceInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + case 2: + message.pid = reader.int32(); + break; + case 3: + message.softwareVersion = reader.uint32(); + break; + case 4: + message.certificationType = reader.string(); + break; + case 5: + message.softwareVersionString = reader.string(); + break; + case 6: + message.cDVersionNumber = reader.uint32(); + break; + case 7: + message.softwareVersionCertificationStatus = reader.uint32(); + break; + case 8: + message.date = reader.string(); + break; + case 9: + message.reason = reader.string(); + break; + case 10: + message.owner = reader.string(); + break; + case 11: + message.history.push(ComplianceHistoryItem.decode(reader, reader.uint32())); + break; + case 12: + message.cDCertificateId = reader.string(); + break; + case 13: + message.certificationRoute = reader.string(); + break; + case 14: + message.programType = reader.string(); + break; + case 15: + message.programTypeVersion = reader.string(); + break; + case 16: + message.compliantPlatformUsed = reader.string(); + break; + case 17: + message.compliantPlatformVersion = reader.string(); + break; + case 18: + message.transport = reader.string(); + break; + case 19: + message.familyId = reader.string(); + break; + case 20: + message.supportedClusters = reader.string(); + break; + case 21: + message.OSVersion = reader.string(); + break; + case 22: + message.parentChild = reader.string(); + break; + case 23: + message.certificationIdOfSoftwareComponent = reader.string(); + break; + case 24: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ComplianceInfo { + return { + vid: isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? 
Number(object.pid) : 0, + softwareVersion: isSet(object.softwareVersion) ? Number(object.softwareVersion) : 0, + certificationType: isSet(object.certificationType) ? String(object.certificationType) : "", + softwareVersionString: isSet(object.softwareVersionString) ? String(object.softwareVersionString) : "", + cDVersionNumber: isSet(object.cDVersionNumber) ? Number(object.cDVersionNumber) : 0, + softwareVersionCertificationStatus: isSet(object.softwareVersionCertificationStatus) + ? Number(object.softwareVersionCertificationStatus) + : 0, + date: isSet(object.date) ? String(object.date) : "", + reason: isSet(object.reason) ? String(object.reason) : "", + owner: isSet(object.owner) ? String(object.owner) : "", + history: Array.isArray(object?.history) ? object.history.map((e: any) => ComplianceHistoryItem.fromJSON(e)) : [], + cDCertificateId: isSet(object.cDCertificateId) ? String(object.cDCertificateId) : "", + certificationRoute: isSet(object.certificationRoute) ? String(object.certificationRoute) : "", + programType: isSet(object.programType) ? String(object.programType) : "", + programTypeVersion: isSet(object.programTypeVersion) ? String(object.programTypeVersion) : "", + compliantPlatformUsed: isSet(object.compliantPlatformUsed) ? String(object.compliantPlatformUsed) : "", + compliantPlatformVersion: isSet(object.compliantPlatformVersion) ? String(object.compliantPlatformVersion) : "", + transport: isSet(object.transport) ? String(object.transport) : "", + familyId: isSet(object.familyId) ? String(object.familyId) : "", + supportedClusters: isSet(object.supportedClusters) ? String(object.supportedClusters) : "", + OSVersion: isSet(object.OSVersion) ? String(object.OSVersion) : "", + parentChild: isSet(object.parentChild) ? String(object.parentChild) : "", + certificationIdOfSoftwareComponent: isSet(object.certificationIdOfSoftwareComponent) + ? String(object.certificationIdOfSoftwareComponent) + : "", + schemaVersion: isSet(object.schemaVersion) ? 
Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: ComplianceInfo): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.softwareVersion !== undefined && (obj.softwareVersion = Math.round(message.softwareVersion)); + message.certificationType !== undefined && (obj.certificationType = message.certificationType); + message.softwareVersionString !== undefined && (obj.softwareVersionString = message.softwareVersionString); + message.cDVersionNumber !== undefined && (obj.cDVersionNumber = Math.round(message.cDVersionNumber)); + message.softwareVersionCertificationStatus !== undefined + && (obj.softwareVersionCertificationStatus = Math.round(message.softwareVersionCertificationStatus)); + message.date !== undefined && (obj.date = message.date); + message.reason !== undefined && (obj.reason = message.reason); + message.owner !== undefined && (obj.owner = message.owner); + if (message.history) { + obj.history = message.history.map((e) => e ? 
ComplianceHistoryItem.toJSON(e) : undefined); + } else { + obj.history = []; + } + message.cDCertificateId !== undefined && (obj.cDCertificateId = message.cDCertificateId); + message.certificationRoute !== undefined && (obj.certificationRoute = message.certificationRoute); + message.programType !== undefined && (obj.programType = message.programType); + message.programTypeVersion !== undefined && (obj.programTypeVersion = message.programTypeVersion); + message.compliantPlatformUsed !== undefined && (obj.compliantPlatformUsed = message.compliantPlatformUsed); + message.compliantPlatformVersion !== undefined && (obj.compliantPlatformVersion = message.compliantPlatformVersion); + message.transport !== undefined && (obj.transport = message.transport); + message.familyId !== undefined && (obj.familyId = message.familyId); + message.supportedClusters !== undefined && (obj.supportedClusters = message.supportedClusters); + message.OSVersion !== undefined && (obj.OSVersion = message.OSVersion); + message.parentChild !== undefined && (obj.parentChild = message.parentChild); + message.certificationIdOfSoftwareComponent !== undefined + && (obj.certificationIdOfSoftwareComponent = message.certificationIdOfSoftwareComponent); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): ComplianceInfo { + const message = createBaseComplianceInfo(); + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + message.softwareVersion = object.softwareVersion ?? 0; + message.certificationType = object.certificationType ?? ""; + message.softwareVersionString = object.softwareVersionString ?? ""; + message.cDVersionNumber = object.cDVersionNumber ?? 0; + message.softwareVersionCertificationStatus = object.softwareVersionCertificationStatus ?? 0; + message.date = object.date ?? ""; + message.reason = object.reason ?? ""; + message.owner = object.owner ?? 
""; + message.history = object.history?.map((e) => ComplianceHistoryItem.fromPartial(e)) || []; + message.cDCertificateId = object.cDCertificateId ?? ""; + message.certificationRoute = object.certificationRoute ?? ""; + message.programType = object.programType ?? ""; + message.programTypeVersion = object.programTypeVersion ?? ""; + message.compliantPlatformUsed = object.compliantPlatformUsed ?? ""; + message.compliantPlatformVersion = object.compliantPlatformVersion ?? ""; + message.transport = object.transport ?? ""; + message.familyId = object.familyId ?? ""; + message.supportedClusters = object.supportedClusters ?? ""; + message.OSVersion = object.OSVersion ?? ""; + message.parentChild = object.parentChild ?? ""; + message.certificationIdOfSoftwareComponent = object.certificationIdOfSoftwareComponent ?? ""; + message.schemaVersion = object.schemaVersion ?? 0; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/device_software_compliance.ts b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/device_software_compliance.ts new file mode 100644 index 000000000..27876e633 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/device_software_compliance.ts @@ -0,0 +1,89 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { ComplianceInfo } from "./compliance_info"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.compliance"; + +export interface DeviceSoftwareCompliance { + cDCertificateId: string; + complianceInfo: ComplianceInfo[]; +} + +function createBaseDeviceSoftwareCompliance(): DeviceSoftwareCompliance { + return { cDCertificateId: "", complianceInfo: [] }; +} + +export const DeviceSoftwareCompliance = { + encode(message: DeviceSoftwareCompliance, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.cDCertificateId !== "") { + writer.uint32(10).string(message.cDCertificateId); + } + for (const v of message.complianceInfo) { + ComplianceInfo.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DeviceSoftwareCompliance { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDeviceSoftwareCompliance(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.cDCertificateId = reader.string(); + break; + case 2: + message.complianceInfo.push(ComplianceInfo.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DeviceSoftwareCompliance { + return { + cDCertificateId: isSet(object.cDCertificateId) ? String(object.cDCertificateId) : "", + complianceInfo: Array.isArray(object?.complianceInfo) + ? object.complianceInfo.map((e: any) => ComplianceInfo.fromJSON(e)) + : [], + }; + }, + + toJSON(message: DeviceSoftwareCompliance): unknown { + const obj: any = {}; + message.cDCertificateId !== undefined && (obj.cDCertificateId = message.cDCertificateId); + if (message.complianceInfo) { + obj.complianceInfo = message.complianceInfo.map((e) => e ? ComplianceInfo.toJSON(e) : undefined); + } else { + obj.complianceInfo = []; + } + return obj; + }, + + fromPartial, I>>(object: I): DeviceSoftwareCompliance { + const message = createBaseDeviceSoftwareCompliance(); + message.cDCertificateId = object.cDCertificateId ?? ""; + message.complianceInfo = object.complianceInfo?.map((e) => ComplianceInfo.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/genesis.ts b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/genesis.ts new file mode 100644 index 000000000..bbcd14975 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/genesis.ts @@ -0,0 +1,154 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { CertifiedModel } from "./certified_model"; +import { ComplianceInfo } from "./compliance_info"; +import { DeviceSoftwareCompliance } from "./device_software_compliance"; +import { ProvisionalModel } from "./provisional_model"; +import { RevokedModel } from "./revoked_model"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.compliance"; + +/** GenesisState defines the compliance module's genesis state. 
*/ +export interface GenesisState { + complianceInfoList: ComplianceInfo[]; + certifiedModelList: CertifiedModel[]; + revokedModelList: RevokedModel[]; + provisionalModelList: ProvisionalModel[]; + /** this line is used by starport scaffolding # genesis/proto/state */ + deviceSoftwareComplianceList: DeviceSoftwareCompliance[]; +} + +function createBaseGenesisState(): GenesisState { + return { + complianceInfoList: [], + certifiedModelList: [], + revokedModelList: [], + provisionalModelList: [], + deviceSoftwareComplianceList: [], + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.complianceInfoList) { + ComplianceInfo.encode(v!, writer.uint32(10).fork()).ldelim(); + } + for (const v of message.certifiedModelList) { + CertifiedModel.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.revokedModelList) { + RevokedModel.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.provisionalModelList) { + ProvisionalModel.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.deviceSoftwareComplianceList) { + DeviceSoftwareCompliance.encode(v!, writer.uint32(42).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGenesisState(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.complianceInfoList.push(ComplianceInfo.decode(reader, reader.uint32())); + break; + case 2: + message.certifiedModelList.push(CertifiedModel.decode(reader, reader.uint32())); + break; + case 3: + message.revokedModelList.push(RevokedModel.decode(reader, reader.uint32())); + break; + case 4: + message.provisionalModelList.push(ProvisionalModel.decode(reader, reader.uint32())); + break; + case 5: + message.deviceSoftwareComplianceList.push(DeviceSoftwareCompliance.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GenesisState { + return { + complianceInfoList: Array.isArray(object?.complianceInfoList) + ? object.complianceInfoList.map((e: any) => ComplianceInfo.fromJSON(e)) + : [], + certifiedModelList: Array.isArray(object?.certifiedModelList) + ? object.certifiedModelList.map((e: any) => CertifiedModel.fromJSON(e)) + : [], + revokedModelList: Array.isArray(object?.revokedModelList) + ? object.revokedModelList.map((e: any) => RevokedModel.fromJSON(e)) + : [], + provisionalModelList: Array.isArray(object?.provisionalModelList) + ? object.provisionalModelList.map((e: any) => ProvisionalModel.fromJSON(e)) + : [], + deviceSoftwareComplianceList: Array.isArray(object?.deviceSoftwareComplianceList) + ? object.deviceSoftwareComplianceList.map((e: any) => DeviceSoftwareCompliance.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GenesisState): unknown { + const obj: any = {}; + if (message.complianceInfoList) { + obj.complianceInfoList = message.complianceInfoList.map((e) => e ? ComplianceInfo.toJSON(e) : undefined); + } else { + obj.complianceInfoList = []; + } + if (message.certifiedModelList) { + obj.certifiedModelList = message.certifiedModelList.map((e) => e ? 
CertifiedModel.toJSON(e) : undefined); + } else { + obj.certifiedModelList = []; + } + if (message.revokedModelList) { + obj.revokedModelList = message.revokedModelList.map((e) => e ? RevokedModel.toJSON(e) : undefined); + } else { + obj.revokedModelList = []; + } + if (message.provisionalModelList) { + obj.provisionalModelList = message.provisionalModelList.map((e) => e ? ProvisionalModel.toJSON(e) : undefined); + } else { + obj.provisionalModelList = []; + } + if (message.deviceSoftwareComplianceList) { + obj.deviceSoftwareComplianceList = message.deviceSoftwareComplianceList.map((e) => + e ? DeviceSoftwareCompliance.toJSON(e) : undefined + ); + } else { + obj.deviceSoftwareComplianceList = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GenesisState { + const message = createBaseGenesisState(); + message.complianceInfoList = object.complianceInfoList?.map((e) => ComplianceInfo.fromPartial(e)) || []; + message.certifiedModelList = object.certifiedModelList?.map((e) => CertifiedModel.fromPartial(e)) || []; + message.revokedModelList = object.revokedModelList?.map((e) => RevokedModel.fromPartial(e)) || []; + message.provisionalModelList = object.provisionalModelList?.map((e) => ProvisionalModel.fromPartial(e)) || []; + message.deviceSoftwareComplianceList = + object.deviceSoftwareComplianceList?.map((e) => DeviceSoftwareCompliance.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/provisional_model.ts b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/provisional_model.ts new file mode 100644 index 000000000..877969c29 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/provisional_model.ts @@ -0,0 +1,112 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.compliance"; + +export interface ProvisionalModel { + vid: number; + pid: number; + softwareVersion: number; + certificationType: string; + value: boolean; +} + +function createBaseProvisionalModel(): ProvisionalModel { + return { vid: 0, pid: 0, softwareVersion: 0, certificationType: "", value: false }; +} + +export const ProvisionalModel = { + encode(message: ProvisionalModel, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(16).int32(message.pid); + } + if (message.softwareVersion !== 0) { + writer.uint32(24).uint32(message.softwareVersion); + } + if (message.certificationType !== "") { + writer.uint32(34).string(message.certificationType); + } + if (message.value === true) { + writer.uint32(40).bool(message.value); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ProvisionalModel { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseProvisionalModel(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + case 2: + message.pid = reader.int32(); + break; + case 3: + message.softwareVersion = reader.uint32(); + break; + case 4: + message.certificationType = reader.string(); + break; + case 5: + message.value = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ProvisionalModel { + return { + vid: isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? Number(object.pid) : 0, + softwareVersion: isSet(object.softwareVersion) ? Number(object.softwareVersion) : 0, + certificationType: isSet(object.certificationType) ? String(object.certificationType) : "", + value: isSet(object.value) ? Boolean(object.value) : false, + }; + }, + + toJSON(message: ProvisionalModel): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.softwareVersion !== undefined && (obj.softwareVersion = Math.round(message.softwareVersion)); + message.certificationType !== undefined && (obj.certificationType = message.certificationType); + message.value !== undefined && (obj.value = message.value); + return obj; + }, + + fromPartial, I>>(object: I): ProvisionalModel { + const message = createBaseProvisionalModel(); + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + message.softwareVersion = object.softwareVersion ?? 0; + message.certificationType = object.certificationType ?? ""; + message.value = object.value ?? false; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? 
ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/query.ts b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/query.ts new file mode 100644 index 000000000..a98e254bb --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/query.ts @@ -0,0 +1,1508 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { PageRequest, PageResponse } from "../../../cosmos/base/query/v1beta1/pagination"; +import { CertifiedModel } from "./certified_model"; +import { ComplianceInfo } from "./compliance_info"; +import { DeviceSoftwareCompliance } from "./device_software_compliance"; +import { ProvisionalModel } from "./provisional_model"; +import { RevokedModel } from "./revoked_model"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.compliance"; + +export interface QueryGetComplianceInfoRequest { + vid: number; + pid: number; + softwareVersion: number; + certificationType: string; +} + +export interface QueryGetComplianceInfoResponse { + complianceInfo: ComplianceInfo | undefined; +} + +export interface QueryAllComplianceInfoRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllComplianceInfoResponse { + complianceInfo: ComplianceInfo[]; + pagination: PageResponse | undefined; +} + +export interface QueryGetCertifiedModelRequest { + vid: number; + pid: number; + softwareVersion: number; + certificationType: string; +} + +export interface 
QueryGetCertifiedModelResponse { + certifiedModel: CertifiedModel | undefined; +} + +export interface QueryAllCertifiedModelRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllCertifiedModelResponse { + certifiedModel: CertifiedModel[]; + pagination: PageResponse | undefined; +} + +export interface QueryGetRevokedModelRequest { + vid: number; + pid: number; + softwareVersion: number; + certificationType: string; +} + +export interface QueryGetRevokedModelResponse { + revokedModel: RevokedModel | undefined; +} + +export interface QueryAllRevokedModelRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllRevokedModelResponse { + revokedModel: RevokedModel[]; + pagination: PageResponse | undefined; +} + +export interface QueryGetProvisionalModelRequest { + vid: number; + pid: number; + softwareVersion: number; + certificationType: string; +} + +export interface QueryGetProvisionalModelResponse { + provisionalModel: ProvisionalModel | undefined; +} + +export interface QueryAllProvisionalModelRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllProvisionalModelResponse { + provisionalModel: ProvisionalModel[]; + pagination: PageResponse | undefined; +} + +export interface QueryGetDeviceSoftwareComplianceRequest { + cDCertificateId: string; +} + +export interface QueryGetDeviceSoftwareComplianceResponse { + deviceSoftwareCompliance: DeviceSoftwareCompliance | undefined; +} + +export interface QueryAllDeviceSoftwareComplianceRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllDeviceSoftwareComplianceResponse { + deviceSoftwareCompliance: DeviceSoftwareCompliance[]; + pagination: PageResponse | undefined; +} + +function createBaseQueryGetComplianceInfoRequest(): QueryGetComplianceInfoRequest { + return { vid: 0, pid: 0, softwareVersion: 0, certificationType: "" }; +} + +export const QueryGetComplianceInfoRequest = { + encode(message: QueryGetComplianceInfoRequest, 
writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(16).int32(message.pid); + } + if (message.softwareVersion !== 0) { + writer.uint32(24).uint32(message.softwareVersion); + } + if (message.certificationType !== "") { + writer.uint32(34).string(message.certificationType); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetComplianceInfoRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetComplianceInfoRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + case 2: + message.pid = reader.int32(); + break; + case 3: + message.softwareVersion = reader.uint32(); + break; + case 4: + message.certificationType = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetComplianceInfoRequest { + return { + vid: isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? Number(object.pid) : 0, + softwareVersion: isSet(object.softwareVersion) ? Number(object.softwareVersion) : 0, + certificationType: isSet(object.certificationType) ? 
String(object.certificationType) : "", + }; + }, + + toJSON(message: QueryGetComplianceInfoRequest): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.softwareVersion !== undefined && (obj.softwareVersion = Math.round(message.softwareVersion)); + message.certificationType !== undefined && (obj.certificationType = message.certificationType); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetComplianceInfoRequest { + const message = createBaseQueryGetComplianceInfoRequest(); + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + message.softwareVersion = object.softwareVersion ?? 0; + message.certificationType = object.certificationType ?? ""; + return message; + }, +}; + +function createBaseQueryGetComplianceInfoResponse(): QueryGetComplianceInfoResponse { + return { complianceInfo: undefined }; +} + +export const QueryGetComplianceInfoResponse = { + encode(message: QueryGetComplianceInfoResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.complianceInfo !== undefined) { + ComplianceInfo.encode(message.complianceInfo, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetComplianceInfoResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetComplianceInfoResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.complianceInfo = ComplianceInfo.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetComplianceInfoResponse { + return { + complianceInfo: isSet(object.complianceInfo) ? 
ComplianceInfo.fromJSON(object.complianceInfo) : undefined, + }; + }, + + toJSON(message: QueryGetComplianceInfoResponse): unknown { + const obj: any = {}; + message.complianceInfo !== undefined + && (obj.complianceInfo = message.complianceInfo ? ComplianceInfo.toJSON(message.complianceInfo) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetComplianceInfoResponse { + const message = createBaseQueryGetComplianceInfoResponse(); + message.complianceInfo = (object.complianceInfo !== undefined && object.complianceInfo !== null) + ? ComplianceInfo.fromPartial(object.complianceInfo) + : undefined; + return message; + }, +}; + +function createBaseQueryAllComplianceInfoRequest(): QueryAllComplianceInfoRequest { + return { pagination: undefined }; +} + +export const QueryAllComplianceInfoRequest = { + encode(message: QueryAllComplianceInfoRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllComplianceInfoRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllComplianceInfoRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllComplianceInfoRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllComplianceInfoRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? 
PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllComplianceInfoRequest { + const message = createBaseQueryAllComplianceInfoRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllComplianceInfoResponse(): QueryAllComplianceInfoResponse { + return { complianceInfo: [], pagination: undefined }; +} + +export const QueryAllComplianceInfoResponse = { + encode(message: QueryAllComplianceInfoResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.complianceInfo) { + ComplianceInfo.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllComplianceInfoResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllComplianceInfoResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.complianceInfo.push(ComplianceInfo.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllComplianceInfoResponse { + return { + complianceInfo: Array.isArray(object?.complianceInfo) + ? object.complianceInfo.map((e: any) => ComplianceInfo.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? 
PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllComplianceInfoResponse): unknown { + const obj: any = {}; + if (message.complianceInfo) { + obj.complianceInfo = message.complianceInfo.map((e) => e ? ComplianceInfo.toJSON(e) : undefined); + } else { + obj.complianceInfo = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllComplianceInfoResponse { + const message = createBaseQueryAllComplianceInfoResponse(); + message.complianceInfo = object.complianceInfo?.map((e) => ComplianceInfo.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetCertifiedModelRequest(): QueryGetCertifiedModelRequest { + return { vid: 0, pid: 0, softwareVersion: 0, certificationType: "" }; +} + +export const QueryGetCertifiedModelRequest = { + encode(message: QueryGetCertifiedModelRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(16).int32(message.pid); + } + if (message.softwareVersion !== 0) { + writer.uint32(24).uint32(message.softwareVersion); + } + if (message.certificationType !== "") { + writer.uint32(34).string(message.certificationType); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetCertifiedModelRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGetCertifiedModelRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + case 2: + message.pid = reader.int32(); + break; + case 3: + message.softwareVersion = reader.uint32(); + break; + case 4: + message.certificationType = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetCertifiedModelRequest { + return { + vid: isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? Number(object.pid) : 0, + softwareVersion: isSet(object.softwareVersion) ? Number(object.softwareVersion) : 0, + certificationType: isSet(object.certificationType) ? String(object.certificationType) : "", + }; + }, + + toJSON(message: QueryGetCertifiedModelRequest): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.softwareVersion !== undefined && (obj.softwareVersion = Math.round(message.softwareVersion)); + message.certificationType !== undefined && (obj.certificationType = message.certificationType); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetCertifiedModelRequest { + const message = createBaseQueryGetCertifiedModelRequest(); + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + message.softwareVersion = object.softwareVersion ?? 0; + message.certificationType = object.certificationType ?? 
""; + return message; + }, +}; + +function createBaseQueryGetCertifiedModelResponse(): QueryGetCertifiedModelResponse { + return { certifiedModel: undefined }; +} + +export const QueryGetCertifiedModelResponse = { + encode(message: QueryGetCertifiedModelResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.certifiedModel !== undefined) { + CertifiedModel.encode(message.certifiedModel, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetCertifiedModelResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetCertifiedModelResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.certifiedModel = CertifiedModel.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetCertifiedModelResponse { + return { + certifiedModel: isSet(object.certifiedModel) ? CertifiedModel.fromJSON(object.certifiedModel) : undefined, + }; + }, + + toJSON(message: QueryGetCertifiedModelResponse): unknown { + const obj: any = {}; + message.certifiedModel !== undefined + && (obj.certifiedModel = message.certifiedModel ? CertifiedModel.toJSON(message.certifiedModel) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetCertifiedModelResponse { + const message = createBaseQueryGetCertifiedModelResponse(); + message.certifiedModel = (object.certifiedModel !== undefined && object.certifiedModel !== null) + ? 
CertifiedModel.fromPartial(object.certifiedModel) + : undefined; + return message; + }, +}; + +function createBaseQueryAllCertifiedModelRequest(): QueryAllCertifiedModelRequest { + return { pagination: undefined }; +} + +export const QueryAllCertifiedModelRequest = { + encode(message: QueryAllCertifiedModelRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllCertifiedModelRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllCertifiedModelRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllCertifiedModelRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllCertifiedModelRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllCertifiedModelRequest { + const message = createBaseQueryAllCertifiedModelRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? 
PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllCertifiedModelResponse(): QueryAllCertifiedModelResponse { + return { certifiedModel: [], pagination: undefined }; +} + +export const QueryAllCertifiedModelResponse = { + encode(message: QueryAllCertifiedModelResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.certifiedModel) { + CertifiedModel.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllCertifiedModelResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllCertifiedModelResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.certifiedModel.push(CertifiedModel.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllCertifiedModelResponse { + return { + certifiedModel: Array.isArray(object?.certifiedModel) + ? object.certifiedModel.map((e: any) => CertifiedModel.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllCertifiedModelResponse): unknown { + const obj: any = {}; + if (message.certifiedModel) { + obj.certifiedModel = message.certifiedModel.map((e) => e ? CertifiedModel.toJSON(e) : undefined); + } else { + obj.certifiedModel = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? 
PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllCertifiedModelResponse { + const message = createBaseQueryAllCertifiedModelResponse(); + message.certifiedModel = object.certifiedModel?.map((e) => CertifiedModel.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetRevokedModelRequest(): QueryGetRevokedModelRequest { + return { vid: 0, pid: 0, softwareVersion: 0, certificationType: "" }; +} + +export const QueryGetRevokedModelRequest = { + encode(message: QueryGetRevokedModelRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(16).int32(message.pid); + } + if (message.softwareVersion !== 0) { + writer.uint32(24).uint32(message.softwareVersion); + } + if (message.certificationType !== "") { + writer.uint32(34).string(message.certificationType); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetRevokedModelRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetRevokedModelRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + case 2: + message.pid = reader.int32(); + break; + case 3: + message.softwareVersion = reader.uint32(); + break; + case 4: + message.certificationType = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetRevokedModelRequest { + return { + vid: isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? 
Number(object.pid) : 0, + softwareVersion: isSet(object.softwareVersion) ? Number(object.softwareVersion) : 0, + certificationType: isSet(object.certificationType) ? String(object.certificationType) : "", + }; + }, + + toJSON(message: QueryGetRevokedModelRequest): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.softwareVersion !== undefined && (obj.softwareVersion = Math.round(message.softwareVersion)); + message.certificationType !== undefined && (obj.certificationType = message.certificationType); + return obj; + }, + + fromPartial, I>>(object: I): QueryGetRevokedModelRequest { + const message = createBaseQueryGetRevokedModelRequest(); + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + message.softwareVersion = object.softwareVersion ?? 0; + message.certificationType = object.certificationType ?? ""; + return message; + }, +}; + +function createBaseQueryGetRevokedModelResponse(): QueryGetRevokedModelResponse { + return { revokedModel: undefined }; +} + +export const QueryGetRevokedModelResponse = { + encode(message: QueryGetRevokedModelResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.revokedModel !== undefined) { + RevokedModel.encode(message.revokedModel, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetRevokedModelResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGetRevokedModelResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.revokedModel = RevokedModel.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetRevokedModelResponse { + return { revokedModel: isSet(object.revokedModel) ? RevokedModel.fromJSON(object.revokedModel) : undefined }; + }, + + toJSON(message: QueryGetRevokedModelResponse): unknown { + const obj: any = {}; + message.revokedModel !== undefined + && (obj.revokedModel = message.revokedModel ? RevokedModel.toJSON(message.revokedModel) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryGetRevokedModelResponse { + const message = createBaseQueryGetRevokedModelResponse(); + message.revokedModel = (object.revokedModel !== undefined && object.revokedModel !== null) + ? RevokedModel.fromPartial(object.revokedModel) + : undefined; + return message; + }, +}; + +function createBaseQueryAllRevokedModelRequest(): QueryAllRevokedModelRequest { + return { pagination: undefined }; +} + +export const QueryAllRevokedModelRequest = { + encode(message: QueryAllRevokedModelRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllRevokedModelRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAllRevokedModelRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllRevokedModelRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllRevokedModelRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryAllRevokedModelRequest { + const message = createBaseQueryAllRevokedModelRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllRevokedModelResponse(): QueryAllRevokedModelResponse { + return { revokedModel: [], pagination: undefined }; +} + +export const QueryAllRevokedModelResponse = { + encode(message: QueryAllRevokedModelResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.revokedModel) { + RevokedModel.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllRevokedModelResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAllRevokedModelResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.revokedModel.push(RevokedModel.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllRevokedModelResponse { + return { + revokedModel: Array.isArray(object?.revokedModel) + ? object.revokedModel.map((e: any) => RevokedModel.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllRevokedModelResponse): unknown { + const obj: any = {}; + if (message.revokedModel) { + obj.revokedModel = message.revokedModel.map((e) => e ? RevokedModel.toJSON(e) : undefined); + } else { + obj.revokedModel = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryAllRevokedModelResponse { + const message = createBaseQueryAllRevokedModelResponse(); + message.revokedModel = object.revokedModel?.map((e) => RevokedModel.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? 
PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetProvisionalModelRequest(): QueryGetProvisionalModelRequest { + return { vid: 0, pid: 0, softwareVersion: 0, certificationType: "" }; +} + +export const QueryGetProvisionalModelRequest = { + encode(message: QueryGetProvisionalModelRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(16).int32(message.pid); + } + if (message.softwareVersion !== 0) { + writer.uint32(24).uint32(message.softwareVersion); + } + if (message.certificationType !== "") { + writer.uint32(34).string(message.certificationType); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetProvisionalModelRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetProvisionalModelRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + case 2: + message.pid = reader.int32(); + break; + case 3: + message.softwareVersion = reader.uint32(); + break; + case 4: + message.certificationType = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetProvisionalModelRequest { + return { + vid: isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? Number(object.pid) : 0, + softwareVersion: isSet(object.softwareVersion) ? Number(object.softwareVersion) : 0, + certificationType: isSet(object.certificationType) ? 
String(object.certificationType) : "", + }; + }, + + toJSON(message: QueryGetProvisionalModelRequest): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.softwareVersion !== undefined && (obj.softwareVersion = Math.round(message.softwareVersion)); + message.certificationType !== undefined && (obj.certificationType = message.certificationType); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetProvisionalModelRequest { + const message = createBaseQueryGetProvisionalModelRequest(); + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + message.softwareVersion = object.softwareVersion ?? 0; + message.certificationType = object.certificationType ?? ""; + return message; + }, +}; + +function createBaseQueryGetProvisionalModelResponse(): QueryGetProvisionalModelResponse { + return { provisionalModel: undefined }; +} + +export const QueryGetProvisionalModelResponse = { + encode(message: QueryGetProvisionalModelResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.provisionalModel !== undefined) { + ProvisionalModel.encode(message.provisionalModel, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetProvisionalModelResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetProvisionalModelResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.provisionalModel = ProvisionalModel.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetProvisionalModelResponse { + return { + provisionalModel: isSet(object.provisionalModel) ? 
ProvisionalModel.fromJSON(object.provisionalModel) : undefined, + }; + }, + + toJSON(message: QueryGetProvisionalModelResponse): unknown { + const obj: any = {}; + message.provisionalModel !== undefined && (obj.provisionalModel = message.provisionalModel + ? ProvisionalModel.toJSON(message.provisionalModel) + : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetProvisionalModelResponse { + const message = createBaseQueryGetProvisionalModelResponse(); + message.provisionalModel = (object.provisionalModel !== undefined && object.provisionalModel !== null) + ? ProvisionalModel.fromPartial(object.provisionalModel) + : undefined; + return message; + }, +}; + +function createBaseQueryAllProvisionalModelRequest(): QueryAllProvisionalModelRequest { + return { pagination: undefined }; +} + +export const QueryAllProvisionalModelRequest = { + encode(message: QueryAllProvisionalModelRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllProvisionalModelRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllProvisionalModelRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllProvisionalModelRequest { + return { pagination: isSet(object.pagination) ? 
PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllProvisionalModelRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllProvisionalModelRequest { + const message = createBaseQueryAllProvisionalModelRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllProvisionalModelResponse(): QueryAllProvisionalModelResponse { + return { provisionalModel: [], pagination: undefined }; +} + +export const QueryAllProvisionalModelResponse = { + encode(message: QueryAllProvisionalModelResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.provisionalModel) { + ProvisionalModel.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllProvisionalModelResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllProvisionalModelResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.provisionalModel.push(ProvisionalModel.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllProvisionalModelResponse { + return { + provisionalModel: Array.isArray(object?.provisionalModel) + ? 
object.provisionalModel.map((e: any) => ProvisionalModel.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllProvisionalModelResponse): unknown { + const obj: any = {}; + if (message.provisionalModel) { + obj.provisionalModel = message.provisionalModel.map((e) => e ? ProvisionalModel.toJSON(e) : undefined); + } else { + obj.provisionalModel = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllProvisionalModelResponse { + const message = createBaseQueryAllProvisionalModelResponse(); + message.provisionalModel = object.provisionalModel?.map((e) => ProvisionalModel.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetDeviceSoftwareComplianceRequest(): QueryGetDeviceSoftwareComplianceRequest { + return { cDCertificateId: "" }; +} + +export const QueryGetDeviceSoftwareComplianceRequest = { + encode(message: QueryGetDeviceSoftwareComplianceRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.cDCertificateId !== "") { + writer.uint32(10).string(message.cDCertificateId); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetDeviceSoftwareComplianceRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGetDeviceSoftwareComplianceRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.cDCertificateId = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetDeviceSoftwareComplianceRequest { + return { cDCertificateId: isSet(object.cDCertificateId) ? String(object.cDCertificateId) : "" }; + }, + + toJSON(message: QueryGetDeviceSoftwareComplianceRequest): unknown { + const obj: any = {}; + message.cDCertificateId !== undefined && (obj.cDCertificateId = message.cDCertificateId); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetDeviceSoftwareComplianceRequest { + const message = createBaseQueryGetDeviceSoftwareComplianceRequest(); + message.cDCertificateId = object.cDCertificateId ?? ""; + return message; + }, +}; + +function createBaseQueryGetDeviceSoftwareComplianceResponse(): QueryGetDeviceSoftwareComplianceResponse { + return { deviceSoftwareCompliance: undefined }; +} + +export const QueryGetDeviceSoftwareComplianceResponse = { + encode(message: QueryGetDeviceSoftwareComplianceResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deviceSoftwareCompliance !== undefined) { + DeviceSoftwareCompliance.encode(message.deviceSoftwareCompliance, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetDeviceSoftwareComplianceResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGetDeviceSoftwareComplianceResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.deviceSoftwareCompliance = DeviceSoftwareCompliance.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetDeviceSoftwareComplianceResponse { + return { + deviceSoftwareCompliance: isSet(object.deviceSoftwareCompliance) + ? DeviceSoftwareCompliance.fromJSON(object.deviceSoftwareCompliance) + : undefined, + }; + }, + + toJSON(message: QueryGetDeviceSoftwareComplianceResponse): unknown { + const obj: any = {}; + message.deviceSoftwareCompliance !== undefined && (obj.deviceSoftwareCompliance = message.deviceSoftwareCompliance + ? DeviceSoftwareCompliance.toJSON(message.deviceSoftwareCompliance) + : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetDeviceSoftwareComplianceResponse { + const message = createBaseQueryGetDeviceSoftwareComplianceResponse(); + message.deviceSoftwareCompliance = + (object.deviceSoftwareCompliance !== undefined && object.deviceSoftwareCompliance !== null) + ? DeviceSoftwareCompliance.fromPartial(object.deviceSoftwareCompliance) + : undefined; + return message; + }, +}; + +function createBaseQueryAllDeviceSoftwareComplianceRequest(): QueryAllDeviceSoftwareComplianceRequest { + return { pagination: undefined }; +} + +export const QueryAllDeviceSoftwareComplianceRequest = { + encode(message: QueryAllDeviceSoftwareComplianceRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllDeviceSoftwareComplianceRequest { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllDeviceSoftwareComplianceRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllDeviceSoftwareComplianceRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllDeviceSoftwareComplianceRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllDeviceSoftwareComplianceRequest { + const message = createBaseQueryAllDeviceSoftwareComplianceRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllDeviceSoftwareComplianceResponse(): QueryAllDeviceSoftwareComplianceResponse { + return { deviceSoftwareCompliance: [], pagination: undefined }; +} + +export const QueryAllDeviceSoftwareComplianceResponse = { + encode(message: QueryAllDeviceSoftwareComplianceResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.deviceSoftwareCompliance) { + DeviceSoftwareCompliance.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllDeviceSoftwareComplianceResponse { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllDeviceSoftwareComplianceResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.deviceSoftwareCompliance.push(DeviceSoftwareCompliance.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllDeviceSoftwareComplianceResponse { + return { + deviceSoftwareCompliance: Array.isArray(object?.deviceSoftwareCompliance) + ? object.deviceSoftwareCompliance.map((e: any) => DeviceSoftwareCompliance.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllDeviceSoftwareComplianceResponse): unknown { + const obj: any = {}; + if (message.deviceSoftwareCompliance) { + obj.deviceSoftwareCompliance = message.deviceSoftwareCompliance.map((e) => + e ? DeviceSoftwareCompliance.toJSON(e) : undefined + ); + } else { + obj.deviceSoftwareCompliance = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllDeviceSoftwareComplianceResponse { + const message = createBaseQueryAllDeviceSoftwareComplianceResponse(); + message.deviceSoftwareCompliance = + object.deviceSoftwareCompliance?.map((e) => DeviceSoftwareCompliance.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +/** Query defines the gRPC querier service. */ +export interface Query { + /** Queries a ComplianceInfo by index. 
*/ + ComplianceInfo(request: QueryGetComplianceInfoRequest): Promise; + /** Queries a list of ComplianceInfo items. */ + ComplianceInfoAll(request: QueryAllComplianceInfoRequest): Promise; + /** Queries a CertifiedModel by index. */ + CertifiedModel(request: QueryGetCertifiedModelRequest): Promise; + /** Queries a list of CertifiedModel items. */ + CertifiedModelAll(request: QueryAllCertifiedModelRequest): Promise; + /** Queries a RevokedModel by index. */ + RevokedModel(request: QueryGetRevokedModelRequest): Promise; + /** Queries a list of RevokedModel items. */ + RevokedModelAll(request: QueryAllRevokedModelRequest): Promise; + /** Queries a ProvisionalModel by index. */ + ProvisionalModel(request: QueryGetProvisionalModelRequest): Promise; + /** Queries a list of ProvisionalModel items. */ + ProvisionalModelAll(request: QueryAllProvisionalModelRequest): Promise; + /** Queries a DeviceSoftwareCompliance by index. */ + DeviceSoftwareCompliance( + request: QueryGetDeviceSoftwareComplianceRequest, + ): Promise; + /** Queries a list of DeviceSoftwareCompliance items. 
*/ + DeviceSoftwareComplianceAll( + request: QueryAllDeviceSoftwareComplianceRequest, + ): Promise; +} + +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.ComplianceInfo = this.ComplianceInfo.bind(this); + this.ComplianceInfoAll = this.ComplianceInfoAll.bind(this); + this.CertifiedModel = this.CertifiedModel.bind(this); + this.CertifiedModelAll = this.CertifiedModelAll.bind(this); + this.RevokedModel = this.RevokedModel.bind(this); + this.RevokedModelAll = this.RevokedModelAll.bind(this); + this.ProvisionalModel = this.ProvisionalModel.bind(this); + this.ProvisionalModelAll = this.ProvisionalModelAll.bind(this); + this.DeviceSoftwareCompliance = this.DeviceSoftwareCompliance.bind(this); + this.DeviceSoftwareComplianceAll = this.DeviceSoftwareComplianceAll.bind(this); + } + ComplianceInfo(request: QueryGetComplianceInfoRequest): Promise { + const data = QueryGetComplianceInfoRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.compliance.Query", + "ComplianceInfo", + data, + ); + return promise.then((data) => QueryGetComplianceInfoResponse.decode(new _m0.Reader(data))); + } + + ComplianceInfoAll(request: QueryAllComplianceInfoRequest): Promise { + const data = QueryAllComplianceInfoRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.compliance.Query", + "ComplianceInfoAll", + data, + ); + return promise.then((data) => QueryAllComplianceInfoResponse.decode(new _m0.Reader(data))); + } + + CertifiedModel(request: QueryGetCertifiedModelRequest): Promise { + const data = QueryGetCertifiedModelRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.compliance.Query", + "CertifiedModel", + data, + ); + return promise.then((data) => QueryGetCertifiedModelResponse.decode(new _m0.Reader(data))); + } + + 
CertifiedModelAll(request: QueryAllCertifiedModelRequest): Promise { + const data = QueryAllCertifiedModelRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.compliance.Query", + "CertifiedModelAll", + data, + ); + return promise.then((data) => QueryAllCertifiedModelResponse.decode(new _m0.Reader(data))); + } + + RevokedModel(request: QueryGetRevokedModelRequest): Promise { + const data = QueryGetRevokedModelRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.compliance.Query", + "RevokedModel", + data, + ); + return promise.then((data) => QueryGetRevokedModelResponse.decode(new _m0.Reader(data))); + } + + RevokedModelAll(request: QueryAllRevokedModelRequest): Promise { + const data = QueryAllRevokedModelRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.compliance.Query", + "RevokedModelAll", + data, + ); + return promise.then((data) => QueryAllRevokedModelResponse.decode(new _m0.Reader(data))); + } + + ProvisionalModel(request: QueryGetProvisionalModelRequest): Promise { + const data = QueryGetProvisionalModelRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.compliance.Query", + "ProvisionalModel", + data, + ); + return promise.then((data) => QueryGetProvisionalModelResponse.decode(new _m0.Reader(data))); + } + + ProvisionalModelAll(request: QueryAllProvisionalModelRequest): Promise { + const data = QueryAllProvisionalModelRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.compliance.Query", + "ProvisionalModelAll", + data, + ); + return promise.then((data) => QueryAllProvisionalModelResponse.decode(new _m0.Reader(data))); + } + + DeviceSoftwareCompliance( + request: QueryGetDeviceSoftwareComplianceRequest, + ): Promise { + const data = 
QueryGetDeviceSoftwareComplianceRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.compliance.Query", + "DeviceSoftwareCompliance", + data, + ); + return promise.then((data) => QueryGetDeviceSoftwareComplianceResponse.decode(new _m0.Reader(data))); + } + + DeviceSoftwareComplianceAll( + request: QueryAllDeviceSoftwareComplianceRequest, + ): Promise { + const data = QueryAllDeviceSoftwareComplianceRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.compliance.Query", + "DeviceSoftwareComplianceAll", + data, + ); + return promise.then((data) => QueryAllDeviceSoftwareComplianceResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/revoked_model.ts b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/revoked_model.ts new file mode 100644 index 000000000..3dea99aa0 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/revoked_model.ts @@ -0,0 +1,112 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.compliance"; + +export interface RevokedModel { + vid: number; + pid: number; + softwareVersion: number; + certificationType: string; + value: boolean; +} + +function createBaseRevokedModel(): RevokedModel { + return { vid: 0, pid: 0, softwareVersion: 0, certificationType: "", value: false }; +} + +export const RevokedModel = { + encode(message: RevokedModel, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(16).int32(message.pid); + } + if (message.softwareVersion !== 0) { + writer.uint32(24).uint32(message.softwareVersion); + } + if (message.certificationType !== "") { + writer.uint32(34).string(message.certificationType); + } + if (message.value === true) { + writer.uint32(40).bool(message.value); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RevokedModel { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseRevokedModel(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + case 2: + message.pid = reader.int32(); + break; + case 3: + message.softwareVersion = reader.uint32(); + break; + case 4: + message.certificationType = reader.string(); + break; + case 5: + message.value = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): RevokedModel { + return { + vid: isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? Number(object.pid) : 0, + softwareVersion: isSet(object.softwareVersion) ? Number(object.softwareVersion) : 0, + certificationType: isSet(object.certificationType) ? String(object.certificationType) : "", + value: isSet(object.value) ? Boolean(object.value) : false, + }; + }, + + toJSON(message: RevokedModel): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.softwareVersion !== undefined && (obj.softwareVersion = Math.round(message.softwareVersion)); + message.certificationType !== undefined && (obj.certificationType = message.certificationType); + message.value !== undefined && (obj.value = message.value); + return obj; + }, + + fromPartial, I>>(object: I): RevokedModel { + const message = createBaseRevokedModel(); + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + message.softwareVersion = object.softwareVersion ?? 0; + message.certificationType = object.certificationType ?? ""; + message.value = object.value ?? false; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? 
{ [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/tx.ts b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/tx.ts new file mode 100644 index 000000000..a2b3273a1 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.compliance/types/zigbeealliance/distributedcomplianceledger/compliance/tx.ts @@ -0,0 +1,1410 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.compliance"; + +export interface MsgCertifyModel { + signer: string; + vid: number; + pid: number; + softwareVersion: number; + softwareVersionString: string; + cDVersionNumber: number; + certificationDate: string; + certificationType: string; + reason: string; + programTypeVersion: string; + cDCertificateId: string; + familyId: string; + supportedClusters: string; + compliantPlatformUsed: string; + compliantPlatformVersion: string; + OSVersion: string; + certificationRoute: string; + programType: string; + transport: string; + parentChild: string; + certificationIdOfSoftwareComponent: string; + schemaVersion: number; +} + +export interface MsgCertifyModelResponse { +} + +export interface MsgRevokeModel { + signer: string; + vid: number; + pid: number; + softwareVersion: number; + softwareVersionString: string; + cDVersionNumber: number; + revocationDate: string; + certificationType: string; + reason: string; + schemaVersion: number; +} + +export interface MsgRevokeModelResponse { +} + +export interface MsgProvisionModel { + signer: string; + vid: 
number; + pid: number; + softwareVersion: number; + softwareVersionString: string; + cDVersionNumber: number; + provisionalDate: string; + certificationType: string; + reason: string; + programTypeVersion: string; + cDCertificateId: string; + familyId: string; + supportedClusters: string; + compliantPlatformUsed: string; + compliantPlatformVersion: string; + OSVersion: string; + certificationRoute: string; + programType: string; + transport: string; + parentChild: string; + certificationIdOfSoftwareComponent: string; + schemaVersion: number; +} + +export interface MsgProvisionModelResponse { +} + +export interface MsgUpdateComplianceInfo { + creator: string; + vid: number; + pid: number; + softwareVersion: number; + certificationType: string; + cDVersionNumber: string; + date: string; + reason: string; + owner: string; + cDCertificateId: string; + certificationRoute: string; + programType: string; + programTypeVersion: string; + compliantPlatformUsed: string; + compliantPlatformVersion: string; + transport: string; + familyId: string; + supportedClusters: string; + OSVersion: string; + parentChild: string; + certificationIdOfSoftwareComponent: string; + schemaVersion: number; +} + +export interface MsgUpdateComplianceInfoResponse { +} + +export interface MsgDeleteComplianceInfo { + creator: string; + vid: number; + pid: number; + softwareVersion: number; + certificationType: string; +} + +export interface MsgDeleteComplianceInfoResponse { +} + +function createBaseMsgCertifyModel(): MsgCertifyModel { + return { + signer: "", + vid: 0, + pid: 0, + softwareVersion: 0, + softwareVersionString: "", + cDVersionNumber: 0, + certificationDate: "", + certificationType: "", + reason: "", + programTypeVersion: "", + cDCertificateId: "", + familyId: "", + supportedClusters: "", + compliantPlatformUsed: "", + compliantPlatformVersion: "", + OSVersion: "", + certificationRoute: "", + programType: "", + transport: "", + parentChild: "", + certificationIdOfSoftwareComponent: "", + 
schemaVersion: 0, + }; +} + +export const MsgCertifyModel = { + encode(message: MsgCertifyModel, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.vid !== 0) { + writer.uint32(16).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(24).int32(message.pid); + } + if (message.softwareVersion !== 0) { + writer.uint32(32).uint32(message.softwareVersion); + } + if (message.softwareVersionString !== "") { + writer.uint32(42).string(message.softwareVersionString); + } + if (message.cDVersionNumber !== 0) { + writer.uint32(48).uint32(message.cDVersionNumber); + } + if (message.certificationDate !== "") { + writer.uint32(58).string(message.certificationDate); + } + if (message.certificationType !== "") { + writer.uint32(66).string(message.certificationType); + } + if (message.reason !== "") { + writer.uint32(74).string(message.reason); + } + if (message.programTypeVersion !== "") { + writer.uint32(82).string(message.programTypeVersion); + } + if (message.cDCertificateId !== "") { + writer.uint32(90).string(message.cDCertificateId); + } + if (message.familyId !== "") { + writer.uint32(98).string(message.familyId); + } + if (message.supportedClusters !== "") { + writer.uint32(106).string(message.supportedClusters); + } + if (message.compliantPlatformUsed !== "") { + writer.uint32(114).string(message.compliantPlatformUsed); + } + if (message.compliantPlatformVersion !== "") { + writer.uint32(122).string(message.compliantPlatformVersion); + } + if (message.OSVersion !== "") { + writer.uint32(130).string(message.OSVersion); + } + if (message.certificationRoute !== "") { + writer.uint32(138).string(message.certificationRoute); + } + if (message.programType !== "") { + writer.uint32(146).string(message.programType); + } + if (message.transport !== "") { + writer.uint32(154).string(message.transport); + } + if (message.parentChild !== "") { + 
writer.uint32(162).string(message.parentChild); + } + if (message.certificationIdOfSoftwareComponent !== "") { + writer.uint32(170).string(message.certificationIdOfSoftwareComponent); + } + if (message.schemaVersion !== 0) { + writer.uint32(176).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCertifyModel { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCertifyModel(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.vid = reader.int32(); + break; + case 3: + message.pid = reader.int32(); + break; + case 4: + message.softwareVersion = reader.uint32(); + break; + case 5: + message.softwareVersionString = reader.string(); + break; + case 6: + message.cDVersionNumber = reader.uint32(); + break; + case 7: + message.certificationDate = reader.string(); + break; + case 8: + message.certificationType = reader.string(); + break; + case 9: + message.reason = reader.string(); + break; + case 10: + message.programTypeVersion = reader.string(); + break; + case 11: + message.cDCertificateId = reader.string(); + break; + case 12: + message.familyId = reader.string(); + break; + case 13: + message.supportedClusters = reader.string(); + break; + case 14: + message.compliantPlatformUsed = reader.string(); + break; + case 15: + message.compliantPlatformVersion = reader.string(); + break; + case 16: + message.OSVersion = reader.string(); + break; + case 17: + message.certificationRoute = reader.string(); + break; + case 18: + message.programType = reader.string(); + break; + case 19: + message.transport = reader.string(); + break; + case 20: + message.parentChild = reader.string(); + break; + case 21: + message.certificationIdOfSoftwareComponent = reader.string(); + break; + 
case 22: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgCertifyModel { + return { + signer: isSet(object.signer) ? String(object.signer) : "", + vid: isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? Number(object.pid) : 0, + softwareVersion: isSet(object.softwareVersion) ? Number(object.softwareVersion) : 0, + softwareVersionString: isSet(object.softwareVersionString) ? String(object.softwareVersionString) : "", + cDVersionNumber: isSet(object.cDVersionNumber) ? Number(object.cDVersionNumber) : 0, + certificationDate: isSet(object.certificationDate) ? String(object.certificationDate) : "", + certificationType: isSet(object.certificationType) ? String(object.certificationType) : "", + reason: isSet(object.reason) ? String(object.reason) : "", + programTypeVersion: isSet(object.programTypeVersion) ? String(object.programTypeVersion) : "", + cDCertificateId: isSet(object.cDCertificateId) ? String(object.cDCertificateId) : "", + familyId: isSet(object.familyId) ? String(object.familyId) : "", + supportedClusters: isSet(object.supportedClusters) ? String(object.supportedClusters) : "", + compliantPlatformUsed: isSet(object.compliantPlatformUsed) ? String(object.compliantPlatformUsed) : "", + compliantPlatformVersion: isSet(object.compliantPlatformVersion) ? String(object.compliantPlatformVersion) : "", + OSVersion: isSet(object.OSVersion) ? String(object.OSVersion) : "", + certificationRoute: isSet(object.certificationRoute) ? String(object.certificationRoute) : "", + programType: isSet(object.programType) ? String(object.programType) : "", + transport: isSet(object.transport) ? String(object.transport) : "", + parentChild: isSet(object.parentChild) ? String(object.parentChild) : "", + certificationIdOfSoftwareComponent: isSet(object.certificationIdOfSoftwareComponent) + ? 
String(object.certificationIdOfSoftwareComponent) + : "", + schemaVersion: isSet(object.schemaVersion) ? Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: MsgCertifyModel): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.softwareVersion !== undefined && (obj.softwareVersion = Math.round(message.softwareVersion)); + message.softwareVersionString !== undefined && (obj.softwareVersionString = message.softwareVersionString); + message.cDVersionNumber !== undefined && (obj.cDVersionNumber = Math.round(message.cDVersionNumber)); + message.certificationDate !== undefined && (obj.certificationDate = message.certificationDate); + message.certificationType !== undefined && (obj.certificationType = message.certificationType); + message.reason !== undefined && (obj.reason = message.reason); + message.programTypeVersion !== undefined && (obj.programTypeVersion = message.programTypeVersion); + message.cDCertificateId !== undefined && (obj.cDCertificateId = message.cDCertificateId); + message.familyId !== undefined && (obj.familyId = message.familyId); + message.supportedClusters !== undefined && (obj.supportedClusters = message.supportedClusters); + message.compliantPlatformUsed !== undefined && (obj.compliantPlatformUsed = message.compliantPlatformUsed); + message.compliantPlatformVersion !== undefined && (obj.compliantPlatformVersion = message.compliantPlatformVersion); + message.OSVersion !== undefined && (obj.OSVersion = message.OSVersion); + message.certificationRoute !== undefined && (obj.certificationRoute = message.certificationRoute); + message.programType !== undefined && (obj.programType = message.programType); + message.transport !== undefined && (obj.transport = message.transport); + message.parentChild !== undefined && (obj.parentChild = message.parentChild); 
+ message.certificationIdOfSoftwareComponent !== undefined + && (obj.certificationIdOfSoftwareComponent = message.certificationIdOfSoftwareComponent); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): MsgCertifyModel { + const message = createBaseMsgCertifyModel(); + message.signer = object.signer ?? ""; + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + message.softwareVersion = object.softwareVersion ?? 0; + message.softwareVersionString = object.softwareVersionString ?? ""; + message.cDVersionNumber = object.cDVersionNumber ?? 0; + message.certificationDate = object.certificationDate ?? ""; + message.certificationType = object.certificationType ?? ""; + message.reason = object.reason ?? ""; + message.programTypeVersion = object.programTypeVersion ?? ""; + message.cDCertificateId = object.cDCertificateId ?? ""; + message.familyId = object.familyId ?? ""; + message.supportedClusters = object.supportedClusters ?? ""; + message.compliantPlatformUsed = object.compliantPlatformUsed ?? ""; + message.compliantPlatformVersion = object.compliantPlatformVersion ?? ""; + message.OSVersion = object.OSVersion ?? ""; + message.certificationRoute = object.certificationRoute ?? ""; + message.programType = object.programType ?? ""; + message.transport = object.transport ?? ""; + message.parentChild = object.parentChild ?? ""; + message.certificationIdOfSoftwareComponent = object.certificationIdOfSoftwareComponent ?? ""; + message.schemaVersion = object.schemaVersion ?? 
0; + return message; + }, +}; + +function createBaseMsgCertifyModelResponse(): MsgCertifyModelResponse { + return {}; +} + +export const MsgCertifyModelResponse = { + encode(_: MsgCertifyModelResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCertifyModelResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCertifyModelResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgCertifyModelResponse { + return {}; + }, + + toJSON(_: MsgCertifyModelResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgCertifyModelResponse { + const message = createBaseMsgCertifyModelResponse(); + return message; + }, +}; + +function createBaseMsgRevokeModel(): MsgRevokeModel { + return { + signer: "", + vid: 0, + pid: 0, + softwareVersion: 0, + softwareVersionString: "", + cDVersionNumber: 0, + revocationDate: "", + certificationType: "", + reason: "", + schemaVersion: 0, + }; +} + +export const MsgRevokeModel = { + encode(message: MsgRevokeModel, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.vid !== 0) { + writer.uint32(16).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(24).int32(message.pid); + } + if (message.softwareVersion !== 0) { + writer.uint32(32).uint32(message.softwareVersion); + } + if (message.softwareVersionString !== "") { + writer.uint32(42).string(message.softwareVersionString); + } + if (message.cDVersionNumber !== 0) { + writer.uint32(48).uint32(message.cDVersionNumber); + } + if (message.revocationDate !== "") { + 
writer.uint32(58).string(message.revocationDate); + } + if (message.certificationType !== "") { + writer.uint32(66).string(message.certificationType); + } + if (message.reason !== "") { + writer.uint32(74).string(message.reason); + } + if (message.schemaVersion !== 0) { + writer.uint32(80).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRevokeModel { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgRevokeModel(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.vid = reader.int32(); + break; + case 3: + message.pid = reader.int32(); + break; + case 4: + message.softwareVersion = reader.uint32(); + break; + case 5: + message.softwareVersionString = reader.string(); + break; + case 6: + message.cDVersionNumber = reader.uint32(); + break; + case 7: + message.revocationDate = reader.string(); + break; + case 8: + message.certificationType = reader.string(); + break; + case 9: + message.reason = reader.string(); + break; + case 10: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgRevokeModel { + return { + signer: isSet(object.signer) ? String(object.signer) : "", + vid: isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? Number(object.pid) : 0, + softwareVersion: isSet(object.softwareVersion) ? Number(object.softwareVersion) : 0, + softwareVersionString: isSet(object.softwareVersionString) ? String(object.softwareVersionString) : "", + cDVersionNumber: isSet(object.cDVersionNumber) ? Number(object.cDVersionNumber) : 0, + revocationDate: isSet(object.revocationDate) ? 
String(object.revocationDate) : "", + certificationType: isSet(object.certificationType) ? String(object.certificationType) : "", + reason: isSet(object.reason) ? String(object.reason) : "", + schemaVersion: isSet(object.schemaVersion) ? Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: MsgRevokeModel): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.softwareVersion !== undefined && (obj.softwareVersion = Math.round(message.softwareVersion)); + message.softwareVersionString !== undefined && (obj.softwareVersionString = message.softwareVersionString); + message.cDVersionNumber !== undefined && (obj.cDVersionNumber = Math.round(message.cDVersionNumber)); + message.revocationDate !== undefined && (obj.revocationDate = message.revocationDate); + message.certificationType !== undefined && (obj.certificationType = message.certificationType); + message.reason !== undefined && (obj.reason = message.reason); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): MsgRevokeModel { + const message = createBaseMsgRevokeModel(); + message.signer = object.signer ?? ""; + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + message.softwareVersion = object.softwareVersion ?? 0; + message.softwareVersionString = object.softwareVersionString ?? ""; + message.cDVersionNumber = object.cDVersionNumber ?? 0; + message.revocationDate = object.revocationDate ?? ""; + message.certificationType = object.certificationType ?? ""; + message.reason = object.reason ?? ""; + message.schemaVersion = object.schemaVersion ?? 
0; + return message; + }, +}; + +function createBaseMsgRevokeModelResponse(): MsgRevokeModelResponse { + return {}; +} + +export const MsgRevokeModelResponse = { + encode(_: MsgRevokeModelResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRevokeModelResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgRevokeModelResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgRevokeModelResponse { + return {}; + }, + + toJSON(_: MsgRevokeModelResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgRevokeModelResponse { + const message = createBaseMsgRevokeModelResponse(); + return message; + }, +}; + +function createBaseMsgProvisionModel(): MsgProvisionModel { + return { + signer: "", + vid: 0, + pid: 0, + softwareVersion: 0, + softwareVersionString: "", + cDVersionNumber: 0, + provisionalDate: "", + certificationType: "", + reason: "", + programTypeVersion: "", + cDCertificateId: "", + familyId: "", + supportedClusters: "", + compliantPlatformUsed: "", + compliantPlatformVersion: "", + OSVersion: "", + certificationRoute: "", + programType: "", + transport: "", + parentChild: "", + certificationIdOfSoftwareComponent: "", + schemaVersion: 0, + }; +} + +export const MsgProvisionModel = { + encode(message: MsgProvisionModel, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.vid !== 0) { + writer.uint32(16).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(24).int32(message.pid); + } + if (message.softwareVersion !== 0) { + 
writer.uint32(32).uint32(message.softwareVersion); + } + if (message.softwareVersionString !== "") { + writer.uint32(42).string(message.softwareVersionString); + } + if (message.cDVersionNumber !== 0) { + writer.uint32(48).uint32(message.cDVersionNumber); + } + if (message.provisionalDate !== "") { + writer.uint32(58).string(message.provisionalDate); + } + if (message.certificationType !== "") { + writer.uint32(66).string(message.certificationType); + } + if (message.reason !== "") { + writer.uint32(74).string(message.reason); + } + if (message.programTypeVersion !== "") { + writer.uint32(82).string(message.programTypeVersion); + } + if (message.cDCertificateId !== "") { + writer.uint32(90).string(message.cDCertificateId); + } + if (message.familyId !== "") { + writer.uint32(98).string(message.familyId); + } + if (message.supportedClusters !== "") { + writer.uint32(106).string(message.supportedClusters); + } + if (message.compliantPlatformUsed !== "") { + writer.uint32(114).string(message.compliantPlatformUsed); + } + if (message.compliantPlatformVersion !== "") { + writer.uint32(122).string(message.compliantPlatformVersion); + } + if (message.OSVersion !== "") { + writer.uint32(130).string(message.OSVersion); + } + if (message.certificationRoute !== "") { + writer.uint32(138).string(message.certificationRoute); + } + if (message.programType !== "") { + writer.uint32(146).string(message.programType); + } + if (message.transport !== "") { + writer.uint32(154).string(message.transport); + } + if (message.parentChild !== "") { + writer.uint32(162).string(message.parentChild); + } + if (message.certificationIdOfSoftwareComponent !== "") { + writer.uint32(170).string(message.certificationIdOfSoftwareComponent); + } + if (message.schemaVersion !== 0) { + writer.uint32(176).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgProvisionModel { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgProvisionModel(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.vid = reader.int32(); + break; + case 3: + message.pid = reader.int32(); + break; + case 4: + message.softwareVersion = reader.uint32(); + break; + case 5: + message.softwareVersionString = reader.string(); + break; + case 6: + message.cDVersionNumber = reader.uint32(); + break; + case 7: + message.provisionalDate = reader.string(); + break; + case 8: + message.certificationType = reader.string(); + break; + case 9: + message.reason = reader.string(); + break; + case 10: + message.programTypeVersion = reader.string(); + break; + case 11: + message.cDCertificateId = reader.string(); + break; + case 12: + message.familyId = reader.string(); + break; + case 13: + message.supportedClusters = reader.string(); + break; + case 14: + message.compliantPlatformUsed = reader.string(); + break; + case 15: + message.compliantPlatformVersion = reader.string(); + break; + case 16: + message.OSVersion = reader.string(); + break; + case 17: + message.certificationRoute = reader.string(); + break; + case 18: + message.programType = reader.string(); + break; + case 19: + message.transport = reader.string(); + break; + case 20: + message.parentChild = reader.string(); + break; + case 21: + message.certificationIdOfSoftwareComponent = reader.string(); + break; + case 22: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgProvisionModel { + return { + signer: isSet(object.signer) ? String(object.signer) : "", + vid: isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? Number(object.pid) : 0, + softwareVersion: isSet(object.softwareVersion) ? 
Number(object.softwareVersion) : 0, + softwareVersionString: isSet(object.softwareVersionString) ? String(object.softwareVersionString) : "", + cDVersionNumber: isSet(object.cDVersionNumber) ? Number(object.cDVersionNumber) : 0, + provisionalDate: isSet(object.provisionalDate) ? String(object.provisionalDate) : "", + certificationType: isSet(object.certificationType) ? String(object.certificationType) : "", + reason: isSet(object.reason) ? String(object.reason) : "", + programTypeVersion: isSet(object.programTypeVersion) ? String(object.programTypeVersion) : "", + cDCertificateId: isSet(object.cDCertificateId) ? String(object.cDCertificateId) : "", + familyId: isSet(object.familyId) ? String(object.familyId) : "", + supportedClusters: isSet(object.supportedClusters) ? String(object.supportedClusters) : "", + compliantPlatformUsed: isSet(object.compliantPlatformUsed) ? String(object.compliantPlatformUsed) : "", + compliantPlatformVersion: isSet(object.compliantPlatformVersion) ? String(object.compliantPlatformVersion) : "", + OSVersion: isSet(object.OSVersion) ? String(object.OSVersion) : "", + certificationRoute: isSet(object.certificationRoute) ? String(object.certificationRoute) : "", + programType: isSet(object.programType) ? String(object.programType) : "", + transport: isSet(object.transport) ? String(object.transport) : "", + parentChild: isSet(object.parentChild) ? String(object.parentChild) : "", + certificationIdOfSoftwareComponent: isSet(object.certificationIdOfSoftwareComponent) + ? String(object.certificationIdOfSoftwareComponent) + : "", + schemaVersion: isSet(object.schemaVersion) ? 
Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: MsgProvisionModel): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.softwareVersion !== undefined && (obj.softwareVersion = Math.round(message.softwareVersion)); + message.softwareVersionString !== undefined && (obj.softwareVersionString = message.softwareVersionString); + message.cDVersionNumber !== undefined && (obj.cDVersionNumber = Math.round(message.cDVersionNumber)); + message.provisionalDate !== undefined && (obj.provisionalDate = message.provisionalDate); + message.certificationType !== undefined && (obj.certificationType = message.certificationType); + message.reason !== undefined && (obj.reason = message.reason); + message.programTypeVersion !== undefined && (obj.programTypeVersion = message.programTypeVersion); + message.cDCertificateId !== undefined && (obj.cDCertificateId = message.cDCertificateId); + message.familyId !== undefined && (obj.familyId = message.familyId); + message.supportedClusters !== undefined && (obj.supportedClusters = message.supportedClusters); + message.compliantPlatformUsed !== undefined && (obj.compliantPlatformUsed = message.compliantPlatformUsed); + message.compliantPlatformVersion !== undefined && (obj.compliantPlatformVersion = message.compliantPlatformVersion); + message.OSVersion !== undefined && (obj.OSVersion = message.OSVersion); + message.certificationRoute !== undefined && (obj.certificationRoute = message.certificationRoute); + message.programType !== undefined && (obj.programType = message.programType); + message.transport !== undefined && (obj.transport = message.transport); + message.parentChild !== undefined && (obj.parentChild = message.parentChild); + message.certificationIdOfSoftwareComponent !== undefined + && (obj.certificationIdOfSoftwareComponent = 
message.certificationIdOfSoftwareComponent); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): MsgProvisionModel { + const message = createBaseMsgProvisionModel(); + message.signer = object.signer ?? ""; + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + message.softwareVersion = object.softwareVersion ?? 0; + message.softwareVersionString = object.softwareVersionString ?? ""; + message.cDVersionNumber = object.cDVersionNumber ?? 0; + message.provisionalDate = object.provisionalDate ?? ""; + message.certificationType = object.certificationType ?? ""; + message.reason = object.reason ?? ""; + message.programTypeVersion = object.programTypeVersion ?? ""; + message.cDCertificateId = object.cDCertificateId ?? ""; + message.familyId = object.familyId ?? ""; + message.supportedClusters = object.supportedClusters ?? ""; + message.compliantPlatformUsed = object.compliantPlatformUsed ?? ""; + message.compliantPlatformVersion = object.compliantPlatformVersion ?? ""; + message.OSVersion = object.OSVersion ?? ""; + message.certificationRoute = object.certificationRoute ?? ""; + message.programType = object.programType ?? ""; + message.transport = object.transport ?? ""; + message.parentChild = object.parentChild ?? ""; + message.certificationIdOfSoftwareComponent = object.certificationIdOfSoftwareComponent ?? ""; + message.schemaVersion = object.schemaVersion ?? 0; + return message; + }, +}; + +function createBaseMsgProvisionModelResponse(): MsgProvisionModelResponse { + return {}; +} + +export const MsgProvisionModelResponse = { + encode(_: MsgProvisionModelResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgProvisionModelResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgProvisionModelResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgProvisionModelResponse { + return {}; + }, + + toJSON(_: MsgProvisionModelResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgProvisionModelResponse { + const message = createBaseMsgProvisionModelResponse(); + return message; + }, +}; + +function createBaseMsgUpdateComplianceInfo(): MsgUpdateComplianceInfo { + return { + creator: "", + vid: 0, + pid: 0, + softwareVersion: 0, + certificationType: "", + cDVersionNumber: "", + date: "", + reason: "", + owner: "", + cDCertificateId: "", + certificationRoute: "", + programType: "", + programTypeVersion: "", + compliantPlatformUsed: "", + compliantPlatformVersion: "", + transport: "", + familyId: "", + supportedClusters: "", + OSVersion: "", + parentChild: "", + certificationIdOfSoftwareComponent: "", + schemaVersion: 0, + }; +} + +export const MsgUpdateComplianceInfo = { + encode(message: MsgUpdateComplianceInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.creator !== "") { + writer.uint32(10).string(message.creator); + } + if (message.vid !== 0) { + writer.uint32(16).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(24).int32(message.pid); + } + if (message.softwareVersion !== 0) { + writer.uint32(32).uint32(message.softwareVersion); + } + if (message.certificationType !== "") { + writer.uint32(42).string(message.certificationType); + } + if (message.cDVersionNumber !== "") { + writer.uint32(50).string(message.cDVersionNumber); + } + if (message.date !== "") { + writer.uint32(58).string(message.date); + } + if (message.reason !== "") { + writer.uint32(66).string(message.reason); + } + if (message.owner !== "") { + writer.uint32(74).string(message.owner); + } + if 
(message.cDCertificateId !== "") { + writer.uint32(82).string(message.cDCertificateId); + } + if (message.certificationRoute !== "") { + writer.uint32(90).string(message.certificationRoute); + } + if (message.programType !== "") { + writer.uint32(98).string(message.programType); + } + if (message.programTypeVersion !== "") { + writer.uint32(106).string(message.programTypeVersion); + } + if (message.compliantPlatformUsed !== "") { + writer.uint32(114).string(message.compliantPlatformUsed); + } + if (message.compliantPlatformVersion !== "") { + writer.uint32(122).string(message.compliantPlatformVersion); + } + if (message.transport !== "") { + writer.uint32(130).string(message.transport); + } + if (message.familyId !== "") { + writer.uint32(138).string(message.familyId); + } + if (message.supportedClusters !== "") { + writer.uint32(146).string(message.supportedClusters); + } + if (message.OSVersion !== "") { + writer.uint32(154).string(message.OSVersion); + } + if (message.parentChild !== "") { + writer.uint32(162).string(message.parentChild); + } + if (message.certificationIdOfSoftwareComponent !== "") { + writer.uint32(170).string(message.certificationIdOfSoftwareComponent); + } + if (message.schemaVersion !== 0) { + writer.uint32(176).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateComplianceInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgUpdateComplianceInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.creator = reader.string(); + break; + case 2: + message.vid = reader.int32(); + break; + case 3: + message.pid = reader.int32(); + break; + case 4: + message.softwareVersion = reader.uint32(); + break; + case 5: + message.certificationType = reader.string(); + break; + case 6: + message.cDVersionNumber = reader.string(); + break; + case 7: + message.date = reader.string(); + break; + case 8: + message.reason = reader.string(); + break; + case 9: + message.owner = reader.string(); + break; + case 10: + message.cDCertificateId = reader.string(); + break; + case 11: + message.certificationRoute = reader.string(); + break; + case 12: + message.programType = reader.string(); + break; + case 13: + message.programTypeVersion = reader.string(); + break; + case 14: + message.compliantPlatformUsed = reader.string(); + break; + case 15: + message.compliantPlatformVersion = reader.string(); + break; + case 16: + message.transport = reader.string(); + break; + case 17: + message.familyId = reader.string(); + break; + case 18: + message.supportedClusters = reader.string(); + break; + case 19: + message.OSVersion = reader.string(); + break; + case 20: + message.parentChild = reader.string(); + break; + case 21: + message.certificationIdOfSoftwareComponent = reader.string(); + break; + case 22: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgUpdateComplianceInfo { + return { + creator: isSet(object.creator) ? String(object.creator) : "", + vid: isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? Number(object.pid) : 0, + softwareVersion: isSet(object.softwareVersion) ? Number(object.softwareVersion) : 0, + certificationType: isSet(object.certificationType) ? 
String(object.certificationType) : "", + cDVersionNumber: isSet(object.cDVersionNumber) ? String(object.cDVersionNumber) : "", + date: isSet(object.date) ? String(object.date) : "", + reason: isSet(object.reason) ? String(object.reason) : "", + owner: isSet(object.owner) ? String(object.owner) : "", + cDCertificateId: isSet(object.cDCertificateId) ? String(object.cDCertificateId) : "", + certificationRoute: isSet(object.certificationRoute) ? String(object.certificationRoute) : "", + programType: isSet(object.programType) ? String(object.programType) : "", + programTypeVersion: isSet(object.programTypeVersion) ? String(object.programTypeVersion) : "", + compliantPlatformUsed: isSet(object.compliantPlatformUsed) ? String(object.compliantPlatformUsed) : "", + compliantPlatformVersion: isSet(object.compliantPlatformVersion) ? String(object.compliantPlatformVersion) : "", + transport: isSet(object.transport) ? String(object.transport) : "", + familyId: isSet(object.familyId) ? String(object.familyId) : "", + supportedClusters: isSet(object.supportedClusters) ? String(object.supportedClusters) : "", + OSVersion: isSet(object.OSVersion) ? String(object.OSVersion) : "", + parentChild: isSet(object.parentChild) ? String(object.parentChild) : "", + certificationIdOfSoftwareComponent: isSet(object.certificationIdOfSoftwareComponent) + ? String(object.certificationIdOfSoftwareComponent) + : "", + schemaVersion: isSet(object.schemaVersion) ? 
Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: MsgUpdateComplianceInfo): unknown { + const obj: any = {}; + message.creator !== undefined && (obj.creator = message.creator); + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.softwareVersion !== undefined && (obj.softwareVersion = Math.round(message.softwareVersion)); + message.certificationType !== undefined && (obj.certificationType = message.certificationType); + message.cDVersionNumber !== undefined && (obj.cDVersionNumber = message.cDVersionNumber); + message.date !== undefined && (obj.date = message.date); + message.reason !== undefined && (obj.reason = message.reason); + message.owner !== undefined && (obj.owner = message.owner); + message.cDCertificateId !== undefined && (obj.cDCertificateId = message.cDCertificateId); + message.certificationRoute !== undefined && (obj.certificationRoute = message.certificationRoute); + message.programType !== undefined && (obj.programType = message.programType); + message.programTypeVersion !== undefined && (obj.programTypeVersion = message.programTypeVersion); + message.compliantPlatformUsed !== undefined && (obj.compliantPlatformUsed = message.compliantPlatformUsed); + message.compliantPlatformVersion !== undefined && (obj.compliantPlatformVersion = message.compliantPlatformVersion); + message.transport !== undefined && (obj.transport = message.transport); + message.familyId !== undefined && (obj.familyId = message.familyId); + message.supportedClusters !== undefined && (obj.supportedClusters = message.supportedClusters); + message.OSVersion !== undefined && (obj.OSVersion = message.OSVersion); + message.parentChild !== undefined && (obj.parentChild = message.parentChild); + message.certificationIdOfSoftwareComponent !== undefined + && (obj.certificationIdOfSoftwareComponent = message.certificationIdOfSoftwareComponent); + message.schemaVersion !== undefined && 
(obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): MsgUpdateComplianceInfo { + const message = createBaseMsgUpdateComplianceInfo(); + message.creator = object.creator ?? ""; + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + message.softwareVersion = object.softwareVersion ?? 0; + message.certificationType = object.certificationType ?? ""; + message.cDVersionNumber = object.cDVersionNumber ?? ""; + message.date = object.date ?? ""; + message.reason = object.reason ?? ""; + message.owner = object.owner ?? ""; + message.cDCertificateId = object.cDCertificateId ?? ""; + message.certificationRoute = object.certificationRoute ?? ""; + message.programType = object.programType ?? ""; + message.programTypeVersion = object.programTypeVersion ?? ""; + message.compliantPlatformUsed = object.compliantPlatformUsed ?? ""; + message.compliantPlatformVersion = object.compliantPlatformVersion ?? ""; + message.transport = object.transport ?? ""; + message.familyId = object.familyId ?? ""; + message.supportedClusters = object.supportedClusters ?? ""; + message.OSVersion = object.OSVersion ?? ""; + message.parentChild = object.parentChild ?? ""; + message.certificationIdOfSoftwareComponent = object.certificationIdOfSoftwareComponent ?? ""; + message.schemaVersion = object.schemaVersion ?? 0; + return message; + }, +}; + +function createBaseMsgUpdateComplianceInfoResponse(): MsgUpdateComplianceInfoResponse { + return {}; +} + +export const MsgUpdateComplianceInfoResponse = { + encode(_: MsgUpdateComplianceInfoResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateComplianceInfoResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgUpdateComplianceInfoResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgUpdateComplianceInfoResponse { + return {}; + }, + + toJSON(_: MsgUpdateComplianceInfoResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgUpdateComplianceInfoResponse { + const message = createBaseMsgUpdateComplianceInfoResponse(); + return message; + }, +}; + +function createBaseMsgDeleteComplianceInfo(): MsgDeleteComplianceInfo { + return { creator: "", vid: 0, pid: 0, softwareVersion: 0, certificationType: "" }; +} + +export const MsgDeleteComplianceInfo = { + encode(message: MsgDeleteComplianceInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.creator !== "") { + writer.uint32(10).string(message.creator); + } + if (message.vid !== 0) { + writer.uint32(16).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(24).int32(message.pid); + } + if (message.softwareVersion !== 0) { + writer.uint32(32).uint32(message.softwareVersion); + } + if (message.certificationType !== "") { + writer.uint32(42).string(message.certificationType); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDeleteComplianceInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgDeleteComplianceInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.creator = reader.string(); + break; + case 2: + message.vid = reader.int32(); + break; + case 3: + message.pid = reader.int32(); + break; + case 4: + message.softwareVersion = reader.uint32(); + break; + case 5: + message.certificationType = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgDeleteComplianceInfo { + return { + creator: isSet(object.creator) ? String(object.creator) : "", + vid: isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? Number(object.pid) : 0, + softwareVersion: isSet(object.softwareVersion) ? Number(object.softwareVersion) : 0, + certificationType: isSet(object.certificationType) ? String(object.certificationType) : "", + }; + }, + + toJSON(message: MsgDeleteComplianceInfo): unknown { + const obj: any = {}; + message.creator !== undefined && (obj.creator = message.creator); + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.softwareVersion !== undefined && (obj.softwareVersion = Math.round(message.softwareVersion)); + message.certificationType !== undefined && (obj.certificationType = message.certificationType); + return obj; + }, + + fromPartial, I>>(object: I): MsgDeleteComplianceInfo { + const message = createBaseMsgDeleteComplianceInfo(); + message.creator = object.creator ?? ""; + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + message.softwareVersion = object.softwareVersion ?? 0; + message.certificationType = object.certificationType ?? 
""; + return message; + }, +}; + +function createBaseMsgDeleteComplianceInfoResponse(): MsgDeleteComplianceInfoResponse { + return {}; +} + +export const MsgDeleteComplianceInfoResponse = { + encode(_: MsgDeleteComplianceInfoResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDeleteComplianceInfoResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgDeleteComplianceInfoResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgDeleteComplianceInfoResponse { + return {}; + }, + + toJSON(_: MsgDeleteComplianceInfoResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgDeleteComplianceInfoResponse { + const message = createBaseMsgDeleteComplianceInfoResponse(); + return message; + }, +}; + +/** Msg defines the Msg service. 
*/ +export interface Msg { + CertifyModel(request: MsgCertifyModel): Promise; + RevokeModel(request: MsgRevokeModel): Promise; + ProvisionModel(request: MsgProvisionModel): Promise; + UpdateComplianceInfo(request: MsgUpdateComplianceInfo): Promise; + /** this line is used by starport scaffolding # proto/tx/rpc */ + DeleteComplianceInfo(request: MsgDeleteComplianceInfo): Promise; +} + +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.CertifyModel = this.CertifyModel.bind(this); + this.RevokeModel = this.RevokeModel.bind(this); + this.ProvisionModel = this.ProvisionModel.bind(this); + this.UpdateComplianceInfo = this.UpdateComplianceInfo.bind(this); + this.DeleteComplianceInfo = this.DeleteComplianceInfo.bind(this); + } + CertifyModel(request: MsgCertifyModel): Promise { + const data = MsgCertifyModel.encode(request).finish(); + const promise = this.rpc.request("zigbeealliance.distributedcomplianceledger.compliance.Msg", "CertifyModel", data); + return promise.then((data) => MsgCertifyModelResponse.decode(new _m0.Reader(data))); + } + + RevokeModel(request: MsgRevokeModel): Promise { + const data = MsgRevokeModel.encode(request).finish(); + const promise = this.rpc.request("zigbeealliance.distributedcomplianceledger.compliance.Msg", "RevokeModel", data); + return promise.then((data) => MsgRevokeModelResponse.decode(new _m0.Reader(data))); + } + + ProvisionModel(request: MsgProvisionModel): Promise { + const data = MsgProvisionModel.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.compliance.Msg", + "ProvisionModel", + data, + ); + return promise.then((data) => MsgProvisionModelResponse.decode(new _m0.Reader(data))); + } + + UpdateComplianceInfo(request: MsgUpdateComplianceInfo): Promise { + const data = MsgUpdateComplianceInfo.encode(request).finish(); + const promise = this.rpc.request( + 
"zigbeealliance.distributedcomplianceledger.compliance.Msg", + "UpdateComplianceInfo", + data, + ); + return promise.then((data) => MsgUpdateComplianceInfoResponse.decode(new _m0.Reader(data))); + } + + DeleteComplianceInfo(request: MsgDeleteComplianceInfo): Promise { + const data = MsgDeleteComplianceInfo.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.compliance.Msg", + "DeleteComplianceInfo", + data, + ); + return promise.then((data) => MsgDeleteComplianceInfoResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/api.swagger.yml b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/api.swagger.yml new file mode 100644 index 000000000..56d697417 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/api.swagger.yml @@ -0,0 +1,2191 @@ +swagger: '2.0' +info: + title: HTTP API Console zigbeealliance.distributedcomplianceledger.dclauth + name: '' + description: '' +paths: + /dcl/auth/accounts: + get: + operationId: AccountAll + responses: + '200': + description: A successful response. 
+ schema: + type: object + properties: + account: + type: array + items: + type: object + properties: + base_account: + type: object + properties: + address: + type: string + pub_key: + type: object + properties: + '@type': + type: string + additionalProperties: {} + account_number: + type: string + format: uint64 + sequence: + type: string + format: uint64 + roles: + type: array + items: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vendorID: + type: integer + format: int32 + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + productIDs: + type: array + items: + type: object + properties: + min: + type: integer + format: int32 + max: + type: integer + format: int32 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/auth/accounts/stat: + get: + operationId: AccountStat + responses: + '200': + description: A successful response. 
+ schema: + type: object + properties: + AccountStat: + type: object + properties: + number: + type: string + format: uint64 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + tags: + - Query + /dcl/auth/accounts/{address}: + get: + operationId: Account + responses: + '200': + description: A successful response. + schema: + type: object + properties: + account: + type: object + properties: + base_account: + type: object + properties: + address: + type: string + pub_key: + type: object + properties: + '@type': + type: string + additionalProperties: {} + account_number: + type: string + format: uint64 + sequence: + type: string + format: uint64 + roles: + type: array + items: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vendorID: + type: integer + format: int32 + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + productIDs: + type: array + items: + type: object + properties: + min: + type: integer + format: int32 + max: + type: integer + format: int32 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: address + in: path + required: true + type: string + tags: + - Query + /dcl/auth/proposed-accounts: + get: + operationId: PendingAccountAll + responses: + '200': + description: A successful response. 
+ schema: + type: object + properties: + pendingAccount: + type: array + items: + type: object + properties: + account: + type: object + properties: + base_account: + type: object + properties: + address: + type: string + pub_key: + type: object + properties: + '@type': + type: string + additionalProperties: {} + account_number: + type: string + format: uint64 + sequence: + type: string + format: uint64 + roles: + type: array + items: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vendorID: + type: integer + format: int32 + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + productIDs: + type: array + items: + type: object + properties: + min: + type: integer + format: int32 + max: + type: integer + format: int32 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/auth/proposed-accounts/{address}: + get: + operationId: PendingAccount + responses: + '200': + description: A successful response. 
+ schema: + type: object + properties: + pendingAccount: + type: object + properties: + account: + type: object + properties: + base_account: + type: object + properties: + address: + type: string + pub_key: + type: object + properties: + '@type': + type: string + additionalProperties: {} + account_number: + type: string + format: uint64 + sequence: + type: string + format: uint64 + roles: + type: array + items: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vendorID: + type: integer + format: int32 + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + productIDs: + type: array + items: + type: object + properties: + min: + type: integer + format: int32 + max: + type: integer + format: int32 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: address + in: path + required: true + type: string + tags: + - Query + /dcl/auth/proposed-revocation-accounts: + get: + operationId: PendingAccountRevocationAll + responses: + '200': + description: A successful response. + schema: + type: object + properties: + pendingAccountRevocation: + type: array + items: + type: object + properties: + address: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/auth/proposed-revocation-accounts/{address}: + get: + operationId: PendingAccountRevocation + responses: + '200': + description: A successful response. + schema: + type: object + properties: + pendingAccountRevocation: + type: object + properties: + address: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: address + in: path + required: true + type: string + tags: + - Query + /dcl/auth/rejected-accounts: + get: + operationId: RejectedAccountAll + responses: + '200': + description: A successful response. 
+ schema: + type: object + properties: + rejectedAccount: + type: array + items: + type: object + properties: + account: + type: object + properties: + base_account: + type: object + properties: + address: + type: string + pub_key: + type: object + properties: + '@type': + type: string + additionalProperties: {} + account_number: + type: string + format: uint64 + sequence: + type: string + format: uint64 + roles: + type: array + items: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vendorID: + type: integer + format: int32 + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + productIDs: + type: array + items: + type: object + properties: + min: + type: integer + format: int32 + max: + type: integer + format: int32 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/auth/rejected-accounts/{address}: + get: + operationId: RejectedAccount + responses: + '200': + description: A successful response. 
+ schema: + type: object + properties: + rejectedAccount: + type: object + properties: + account: + type: object + properties: + base_account: + type: object + properties: + address: + type: string + pub_key: + type: object + properties: + '@type': + type: string + additionalProperties: {} + account_number: + type: string + format: uint64 + sequence: + type: string + format: uint64 + roles: + type: array + items: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vendorID: + type: integer + format: int32 + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + productIDs: + type: array + items: + type: object + properties: + min: + type: integer + format: int32 + max: + type: integer + format: int32 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: address + in: path + required: true + type: string + tags: + - Query + /dcl/auth/revoked-accounts: + get: + operationId: RevokedAccountAll + responses: + '200': + description: A successful response. 
+ schema: + type: object + properties: + revokedAccount: + type: array + items: + type: object + properties: + account: + type: object + properties: + base_account: + type: object + properties: + address: + type: string + pub_key: + type: object + properties: + '@type': + type: string + additionalProperties: {} + account_number: + type: string + format: uint64 + sequence: + type: string + format: uint64 + roles: + type: array + items: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vendorID: + type: integer + format: int32 + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + productIDs: + type: array + items: + type: object + properties: + min: + type: integer + format: int32 + max: + type: integer + format: int32 + revokeApprovals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + reason: + type: string + enum: + - TrusteeVoting + - MaliciousValidator + default: TrusteeVoting + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/auth/revoked-accounts/{address}: + get: + operationId: RevokedAccount + responses: + '200': + description: A successful response. + schema: + type: object + properties: + revokedAccount: + type: object + properties: + account: + type: object + properties: + base_account: + type: object + properties: + address: + type: string + pub_key: + type: object + properties: + '@type': + type: string + additionalProperties: {} + account_number: + type: string + format: uint64 + sequence: + type: string + format: uint64 + roles: + type: array + items: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vendorID: + type: integer + format: int32 + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + productIDs: + type: array + items: + type: object + properties: + min: + type: integer + format: int32 + max: + type: integer + format: int32 + revokeApprovals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + reason: + type: string + enum: + - TrusteeVoting + - MaliciousValidator + default: TrusteeVoting + default: 
+ description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: address + in: path + required: true + type: string + tags: + - Query +definitions: + Any: + type: object + properties: + '@type': + type: string + additionalProperties: {} + Status: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + BaseAccount: + type: object + properties: + address: + type: string + pub_key: + type: object + properties: + '@type': + type: string + additionalProperties: {} + account_number: + type: string + format: uint64 + sequence: + type: string + format: uint64 + Grant: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + PageRequest: + type: object + properties: + key: + type: string + format: byte + offset: + type: string + format: uint64 + limit: + type: string + format: uint64 + count_total: + type: boolean + reverse: + type: boolean + PageResponse: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllAccountResponse: + type: object + properties: + account: + type: array + items: + type: object + properties: + base_account: + type: object + properties: + address: + type: string + pub_key: + type: object + properties: + '@type': + type: string + additionalProperties: {} + account_number: + type: string + format: uint64 + sequence: + type: string + format: uint64 + roles: + type: array + items: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + 
vendorID: + type: integer + format: int32 + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + productIDs: + type: array + items: + type: object + properties: + min: + type: integer + format: int32 + max: + type: integer + format: int32 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllPendingAccountResponse: + type: object + properties: + pendingAccount: + type: array + items: + type: object + properties: + account: + type: object + properties: + base_account: + type: object + properties: + address: + type: string + pub_key: + type: object + properties: + '@type': + type: string + additionalProperties: {} + account_number: + type: string + format: uint64 + sequence: + type: string + format: uint64 + roles: + type: array + items: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vendorID: + type: integer + format: int32 + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + productIDs: + type: array + items: + type: object + properties: + min: + type: integer + format: int32 + max: + type: integer + format: int32 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllPendingAccountRevocationResponse: + type: object + properties: + pendingAccountRevocation: + type: array + items: + type: object + properties: + address: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string 
+ format: uint64 + QueryAllRejectedAccountResponse: + type: object + properties: + rejectedAccount: + type: array + items: + type: object + properties: + account: + type: object + properties: + base_account: + type: object + properties: + address: + type: string + pub_key: + type: object + properties: + '@type': + type: string + additionalProperties: {} + account_number: + type: string + format: uint64 + sequence: + type: string + format: uint64 + roles: + type: array + items: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vendorID: + type: integer + format: int32 + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + productIDs: + type: array + items: + type: object + properties: + min: + type: integer + format: int32 + max: + type: integer + format: int32 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllRevokedAccountResponse: + type: object + properties: + revokedAccount: + type: array + items: + type: object + properties: + account: + type: object + properties: + base_account: + type: object + properties: + address: + type: string + pub_key: + type: object + properties: + '@type': + type: string + additionalProperties: {} + account_number: + type: string + format: uint64 + sequence: + type: string + format: uint64 + roles: + type: array + items: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vendorID: + type: integer + format: int32 + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + productIDs: + type: array + items: + type: object + 
properties: + min: + type: integer + format: int32 + max: + type: integer + format: int32 + revokeApprovals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + reason: + type: string + enum: + - TrusteeVoting + - MaliciousValidator + default: TrusteeVoting + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryGetAccountResponse: + type: object + properties: + account: + type: object + properties: + base_account: + type: object + properties: + address: + type: string + pub_key: + type: object + properties: + '@type': + type: string + additionalProperties: {} + account_number: + type: string + format: uint64 + sequence: + type: string + format: uint64 + roles: + type: array + items: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vendorID: + type: integer + format: int32 + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + productIDs: + type: array + items: + type: object + properties: + min: + type: integer + format: int32 + max: + type: integer + format: int32 + QueryGetAccountStatResponse: + type: object + properties: + AccountStat: + type: object + properties: + number: + type: string + format: uint64 + QueryGetPendingAccountResponse: + type: object + properties: + pendingAccount: + type: object + properties: + account: + type: object + properties: + base_account: + type: object + properties: + address: + type: string + pub_key: + type: object + properties: + '@type': + type: string + additionalProperties: {} + account_number: + type: string + format: uint64 + sequence: + type: string + format: uint64 + roles: + type: array + items: + type: string + approvals: + type: array + 
items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vendorID: + type: integer + format: int32 + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + productIDs: + type: array + items: + type: object + properties: + min: + type: integer + format: int32 + max: + type: integer + format: int32 + QueryGetPendingAccountRevocationResponse: + type: object + properties: + pendingAccountRevocation: + type: object + properties: + address: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + QueryGetRejectedAccountResponse: + type: object + properties: + rejectedAccount: + type: object + properties: + account: + type: object + properties: + base_account: + type: object + properties: + address: + type: string + pub_key: + type: object + properties: + '@type': + type: string + additionalProperties: {} + account_number: + type: string + format: uint64 + sequence: + type: string + format: uint64 + roles: + type: array + items: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vendorID: + type: integer + format: int32 + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + productIDs: + type: array + items: + type: object + properties: + min: + type: integer + format: int32 + max: + type: integer + format: int32 + QueryGetRevokedAccountResponse: + type: object + properties: + revokedAccount: + type: object + properties: + account: + type: object + properties: + base_account: + type: object + properties: + address: + type: string + pub_key: + type: object + properties: + '@type': + 
type: string + additionalProperties: {} + account_number: + type: string + format: uint64 + sequence: + type: string + format: uint64 + roles: + type: array + items: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vendorID: + type: integer + format: int32 + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + productIDs: + type: array + items: + type: object + properties: + min: + type: integer + format: int32 + max: + type: integer + format: int32 + revokeApprovals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + reason: + type: string + enum: + - TrusteeVoting + - MaliciousValidator + default: TrusteeVoting + Reason: + type: string + enum: + - TrusteeVoting + - MaliciousValidator + default: TrusteeVoting + Uint16Range: + type: object + properties: + min: + type: integer + format: int32 + max: + type: integer + format: int32 + dclauth.Account: + type: object + properties: + base_account: + type: object + properties: + address: + type: string + pub_key: + type: object + properties: + '@type': + type: string + additionalProperties: {} + account_number: + type: string + format: uint64 + sequence: + type: string + format: uint64 + roles: + type: array + items: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vendorID: + type: integer + format: int32 + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + productIDs: + type: array + items: + type: object + properties: + min: + type: integer + format: int32 + max: + type: integer + format: int32 + 
dclauth.AccountStat: + type: object + properties: + number: + type: string + format: uint64 + dclauth.PendingAccount: + type: object + properties: + account: + type: object + properties: + base_account: + type: object + properties: + address: + type: string + pub_key: + type: object + properties: + '@type': + type: string + additionalProperties: {} + account_number: + type: string + format: uint64 + sequence: + type: string + format: uint64 + roles: + type: array + items: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vendorID: + type: integer + format: int32 + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + productIDs: + type: array + items: + type: object + properties: + min: + type: integer + format: int32 + max: + type: integer + format: int32 + dclauth.PendingAccountRevocation: + type: object + properties: + address: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + dclauth.RejectedAccount: + type: object + properties: + account: + type: object + properties: + base_account: + type: object + properties: + address: + type: string + pub_key: + type: object + properties: + '@type': + type: string + additionalProperties: {} + account_number: + type: string + format: uint64 + sequence: + type: string + format: uint64 + roles: + type: array + items: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vendorID: + type: integer + format: int32 + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + productIDs: + type: 
array + items: + type: object + properties: + min: + type: integer + format: int32 + max: + type: integer + format: int32 + dclauth.RevokedAccount: + type: object + properties: + account: + type: object + properties: + base_account: + type: object + properties: + address: + type: string + pub_key: + type: object + properties: + '@type': + type: string + additionalProperties: {} + account_number: + type: string + format: uint64 + sequence: + type: string + format: uint64 + roles: + type: array + items: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vendorID: + type: integer + format: int32 + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + productIDs: + type: array + items: + type: object + properties: + min: + type: integer + format: int32 + max: + type: integer + format: int32 + revokeApprovals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + reason: + type: string + enum: + - TrusteeVoting + - MaliciousValidator + default: TrusteeVoting + MsgApproveAddAccountResponse: + type: object + MsgApproveRevokeAccountResponse: + type: object + MsgProposeAddAccountResponse: + type: object + MsgProposeRevokeAccountResponse: + type: object + MsgRejectAddAccountResponse: + type: object diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/index.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/index.ts new file mode 100755 index 000000000..c9dfa159e --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/index.ts @@ -0,0 +1,6 @@ +import Module from './module'; +import { txClient, queryClient, registry } from './module'; +import { msgTypes } from './registry'; + +export * from "./types"; +export { Module, 
msgTypes, txClient, queryClient, registry }; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/module.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/module.ts new file mode 100755 index 000000000..fc8419731 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/module.ts @@ -0,0 +1,275 @@ +// Generated by Ignite ignite.com/cli + +import { StdFee } from "@cosmjs/launchpad"; +import { SigningStargateClient, DeliverTxResponse } from "@cosmjs/stargate"; +import { EncodeObject, GeneratedType, OfflineSigner, Registry } from "@cosmjs/proto-signing"; +import { msgTypes } from './registry'; +import { IgniteClient } from "../client" +import { MissingWalletError } from "../helpers" +import { Api } from "./rest"; +import { MsgProposeAddAccount } from "./types/zigbeealliance/distributedcomplianceledger/dclauth/tx"; +import { MsgApproveAddAccount } from "./types/zigbeealliance/distributedcomplianceledger/dclauth/tx"; +import { MsgRejectAddAccount } from "./types/zigbeealliance/distributedcomplianceledger/dclauth/tx"; +import { MsgApproveRevokeAccount } from "./types/zigbeealliance/distributedcomplianceledger/dclauth/tx"; +import { MsgProposeRevokeAccount } from "./types/zigbeealliance/distributedcomplianceledger/dclauth/tx"; + +import { Account as typeAccount} from "./types" +import { AccountStat as typeAccountStat} from "./types" +import { Grant as typeGrant} from "./types" +import { PendingAccount as typePendingAccount} from "./types" +import { PendingAccountRevocation as typePendingAccountRevocation} from "./types" +import { RejectedAccount as typeRejectedAccount} from "./types" +import { RevokedAccount as typeRevokedAccount} from "./types" + +export { MsgProposeAddAccount, MsgApproveAddAccount, MsgRejectAddAccount, MsgApproveRevokeAccount, MsgProposeRevokeAccount }; + +type sendMsgProposeAddAccountParams = { + value: MsgProposeAddAccount, + fee?: StdFee, + memo?: string +}; + +type 
sendMsgApproveAddAccountParams = { + value: MsgApproveAddAccount, + fee?: StdFee, + memo?: string +}; + +type sendMsgRejectAddAccountParams = { + value: MsgRejectAddAccount, + fee?: StdFee, + memo?: string +}; + +type sendMsgApproveRevokeAccountParams = { + value: MsgApproveRevokeAccount, + fee?: StdFee, + memo?: string +}; + +type sendMsgProposeRevokeAccountParams = { + value: MsgProposeRevokeAccount, + fee?: StdFee, + memo?: string +}; + + +type msgProposeAddAccountParams = { + value: MsgProposeAddAccount, +}; + +type msgApproveAddAccountParams = { + value: MsgApproveAddAccount, +}; + +type msgRejectAddAccountParams = { + value: MsgRejectAddAccount, +}; + +type msgApproveRevokeAccountParams = { + value: MsgApproveRevokeAccount, +}; + +type msgProposeRevokeAccountParams = { + value: MsgProposeRevokeAccount, +}; + + +export const registry = new Registry(msgTypes); + +type Field = { + name: string; + type: unknown; +} +function getStructure(template) { + const structure: {fields: Field[]} = { fields: [] } + for (let [key, value] of Object.entries(template)) { + let field = { name: key, type: typeof value } + structure.fields.push(field) + } + return structure +} +const defaultFee = { + amount: [], + gas: "200000", +}; + +interface TxClientOptions { + addr: string + prefix: string + signer?: OfflineSigner +} + +export const txClient = ({ signer, prefix, addr }: TxClientOptions = { addr: "http://localhost:26657", prefix: "cosmos" }) => { + + return { + + async sendMsgProposeAddAccount({ value, fee, memo }: sendMsgProposeAddAccountParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgProposeAddAccount: Unable to sign Tx. 
Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgProposeAddAccount({ value: MsgProposeAddAccount.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgProposeAddAccount: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgApproveAddAccount({ value, fee, memo }: sendMsgApproveAddAccountParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgApproveAddAccount: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgApproveAddAccount({ value: MsgApproveAddAccount.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgApproveAddAccount: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgRejectAddAccount({ value, fee, memo }: sendMsgRejectAddAccountParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgRejectAddAccount: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgRejectAddAccount({ value: MsgRejectAddAccount.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? 
fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgRejectAddAccount: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgApproveRevokeAccount({ value, fee, memo }: sendMsgApproveRevokeAccountParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgApproveRevokeAccount: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgApproveRevokeAccount({ value: MsgApproveRevokeAccount.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgApproveRevokeAccount: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgProposeRevokeAccount({ value, fee, memo }: sendMsgProposeRevokeAccountParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgProposeRevokeAccount: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgProposeRevokeAccount({ value: MsgProposeRevokeAccount.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? 
fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgProposeRevokeAccount: Could not broadcast Tx: '+ e.message) + } + }, + + + msgProposeAddAccount({ value }: msgProposeAddAccountParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.dclauth.MsgProposeAddAccount", value: MsgProposeAddAccount.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgProposeAddAccount: Could not create message: ' + e.message) + } + }, + + msgApproveAddAccount({ value }: msgApproveAddAccountParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.dclauth.MsgApproveAddAccount", value: MsgApproveAddAccount.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgApproveAddAccount: Could not create message: ' + e.message) + } + }, + + msgRejectAddAccount({ value }: msgRejectAddAccountParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.dclauth.MsgRejectAddAccount", value: MsgRejectAddAccount.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgRejectAddAccount: Could not create message: ' + e.message) + } + }, + + msgApproveRevokeAccount({ value }: msgApproveRevokeAccountParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.dclauth.MsgApproveRevokeAccount", value: MsgApproveRevokeAccount.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgApproveRevokeAccount: Could not create message: ' + e.message) + } + }, + + msgProposeRevokeAccount({ value }: msgProposeRevokeAccountParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.dclauth.MsgProposeRevokeAccount", value: MsgProposeRevokeAccount.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgProposeRevokeAccount: Could not create message: ' + e.message) + } + }, + + } +}; + +interface QueryClientOptions { 
+ addr: string +} + +export const queryClient = ({ addr: addr }: QueryClientOptions = { addr: "http://localhost:1317" }) => { + return new Api({ baseURL: addr }); +}; + +class SDKModule { + public query: ReturnType; + public tx: ReturnType; + public structure: Record; + public registry: Array<[string, GeneratedType]> = []; + + constructor(client: IgniteClient) { + + this.query = queryClient({ addr: client.env.apiURL }); + this.updateTX(client); + this.structure = { + Account: getStructure(typeAccount.fromPartial({})), + AccountStat: getStructure(typeAccountStat.fromPartial({})), + Grant: getStructure(typeGrant.fromPartial({})), + PendingAccount: getStructure(typePendingAccount.fromPartial({})), + PendingAccountRevocation: getStructure(typePendingAccountRevocation.fromPartial({})), + RejectedAccount: getStructure(typeRejectedAccount.fromPartial({})), + RevokedAccount: getStructure(typeRevokedAccount.fromPartial({})), + + }; + client.on('signer-changed',(signer) => { + this.updateTX(client); + }) + } + updateTX(client: IgniteClient) { + const methods = txClient({ + signer: client.signer, + addr: client.env.rpcURL, + prefix: client.env.prefix ?? 
"cosmos", + }) + + this.tx = methods; + for (let m in methods) { + this.tx[m] = methods[m].bind(this.tx); + } + } +}; + +const Module = (test: IgniteClient) => { + return { + module: { + ZigbeeallianceDistributedcomplianceledgerDclauth: new SDKModule(test) + }, + registry: msgTypes + } +} +export default Module; \ No newline at end of file diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/registry.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/registry.ts new file mode 100755 index 000000000..bdc07df48 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/registry.ts @@ -0,0 +1,17 @@ +import { GeneratedType } from "@cosmjs/proto-signing"; +import { MsgProposeAddAccount } from "./types/zigbeealliance/distributedcomplianceledger/dclauth/tx"; +import { MsgApproveAddAccount } from "./types/zigbeealliance/distributedcomplianceledger/dclauth/tx"; +import { MsgRejectAddAccount } from "./types/zigbeealliance/distributedcomplianceledger/dclauth/tx"; +import { MsgApproveRevokeAccount } from "./types/zigbeealliance/distributedcomplianceledger/dclauth/tx"; +import { MsgProposeRevokeAccount } from "./types/zigbeealliance/distributedcomplianceledger/dclauth/tx"; + +const msgTypes: Array<[string, GeneratedType]> = [ + ["/zigbeealliance.distributedcomplianceledger.dclauth.MsgProposeAddAccount", MsgProposeAddAccount], + ["/zigbeealliance.distributedcomplianceledger.dclauth.MsgApproveAddAccount", MsgApproveAddAccount], + ["/zigbeealliance.distributedcomplianceledger.dclauth.MsgRejectAddAccount", MsgRejectAddAccount], + ["/zigbeealliance.distributedcomplianceledger.dclauth.MsgApproveRevokeAccount", MsgApproveRevokeAccount], + ["/zigbeealliance.distributedcomplianceledger.dclauth.MsgProposeRevokeAccount", MsgProposeRevokeAccount], + +]; + +export { msgTypes } \ No newline at end of file diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/rest.ts 
b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/rest.ts new file mode 100644 index 000000000..c3082e959 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/rest.ts @@ -0,0 +1,809 @@ +/* eslint-disable */ +/* tslint:disable */ +/* + * --------------------------------------------------------------- + * ## THIS FILE WAS GENERATED VIA SWAGGER-TYPESCRIPT-API ## + * ## ## + * ## AUTHOR: acacode ## + * ## SOURCE: https://github.com/acacode/swagger-typescript-api ## + * --------------------------------------------------------------- + */ + +export enum RevokedAccountReason { + TrusteeVoting = "TrusteeVoting", + MaliciousValidator = "MaliciousValidator", +} + +export interface CommonUint16Range { + /** @format int32 */ + min?: number; + + /** @format int32 */ + max?: number; +} + +export interface DclauthGrant { + address?: string; + + /** + * number of nanoseconds elapsed since January 1, 1970 UTC + * @format int64 + */ + time?: string; + info?: string; +} + +export type DclauthMsgApproveAddAccountResponse = object; + +export type DclauthMsgApproveRevokeAccountResponse = object; + +export type DclauthMsgProposeAddAccountResponse = object; + +export type DclauthMsgProposeRevokeAccountResponse = object; + +export type DclauthMsgRejectAddAccountResponse = object; + +export interface DclauthQueryAllAccountResponse { + account?: DistributedcomplianceledgerdclauthAccount[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface DclauthQueryAllPendingAccountResponse { + pendingAccount?: DistributedcomplianceledgerdclauthPendingAccount[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. 
+ * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface DclauthQueryAllPendingAccountRevocationResponse { + pendingAccountRevocation?: DistributedcomplianceledgerdclauthPendingAccountRevocation[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface DclauthQueryAllRejectedAccountResponse { + rejectedAccount?: DistributedcomplianceledgerdclauthRejectedAccount[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface DclauthQueryAllRevokedAccountResponse { + revokedAccount?: DistributedcomplianceledgerdclauthRevokedAccount[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. 
+ * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface DclauthQueryGetAccountResponse { + account?: DistributedcomplianceledgerdclauthAccount; +} + +export interface DclauthQueryGetAccountStatResponse { + AccountStat?: DistributedcomplianceledgerdclauthAccountStat; +} + +export interface DclauthQueryGetPendingAccountResponse { + pendingAccount?: DistributedcomplianceledgerdclauthPendingAccount; +} + +export interface DclauthQueryGetPendingAccountRevocationResponse { + pendingAccountRevocation?: DistributedcomplianceledgerdclauthPendingAccountRevocation; +} + +export interface DclauthQueryGetRejectedAccountResponse { + rejectedAccount?: DistributedcomplianceledgerdclauthRejectedAccount; +} + +export interface DclauthQueryGetRevokedAccountResponse { + revokedAccount?: DistributedcomplianceledgerdclauthRevokedAccount; +} + +export interface DistributedcomplianceledgerdclauthAccount { + /** + * BaseAccount defines a base account type. It contains all the necessary fields + * for basic account functionality. Any custom account type should extend this + * type for additional functionality (e.g. vesting). + */ + base_account?: V1Beta1BaseAccount; + + /** + * NOTE. 
we do not user AccountRoles casting here to preserve repeated form + * so protobuf takes care about repeated items in generated code, + * (but that might be not the final solution) + */ + roles?: string[]; + approvals?: DclauthGrant[]; + + /** @format int32 */ + vendorID?: number; + rejects?: DclauthGrant[]; + productIDs?: CommonUint16Range[]; +} + +export interface DistributedcomplianceledgerdclauthAccountStat { + /** @format uint64 */ + number?: string; +} + +export interface DistributedcomplianceledgerdclauthPendingAccount { + account?: DistributedcomplianceledgerdclauthAccount; +} + +export interface DistributedcomplianceledgerdclauthPendingAccountRevocation { + address?: string; + approvals?: DclauthGrant[]; +} + +export interface DistributedcomplianceledgerdclauthRejectedAccount { + account?: DistributedcomplianceledgerdclauthAccount; +} + +export interface DistributedcomplianceledgerdclauthRevokedAccount { + account?: DistributedcomplianceledgerdclauthAccount; + revokeApprovals?: DclauthGrant[]; + reason?: RevokedAccountReason; +} + +/** +* `Any` contains an arbitrary serialized protocol buffer message along with a +URL that describes the type of the serialized message. + +Protobuf library provides support to pack/unpack Any values in the form +of utility functions or additional generated methods of the Any type. + +Example 1: Pack and unpack a message in C++. + + Foo foo = ...; + Any any; + any.PackFrom(foo); + ... + if (any.UnpackTo(&foo)) { + ... + } + +Example 2: Pack and unpack a message in Java. + + Foo foo = ...; + Any any = Any.pack(foo); + ... + if (any.is(Foo.class)) { + foo = any.unpack(Foo.class); + } + + Example 3: Pack and unpack a message in Python. + + foo = Foo(...) + any = Any() + any.Pack(foo) + ... + if any.Is(Foo.DESCRIPTOR): + any.Unpack(foo) + ... + + Example 4: Pack and unpack a message in Go + + foo := &pb.Foo{...} + any, err := anypb.New(foo) + if err != nil { + ... + } + ... 
+ foo := &pb.Foo{} + if err := any.UnmarshalTo(foo); err != nil { + ... + } + +The pack methods provided by protobuf library will by default use +'type.googleapis.com/full.type.name' as the type URL and the unpack +methods only use the fully qualified type name after the last '/' +in the type URL, for example "foo.bar.com/x/y.z" will yield type +name "y.z". + + +JSON +==== +The JSON representation of an `Any` value uses the regular +representation of the deserialized, embedded message, with an +additional field `@type` which contains the type URL. Example: + + package google.profile; + message Person { + string first_name = 1; + string last_name = 2; + } + + { + "@type": "type.googleapis.com/google.profile.Person", + "firstName": , + "lastName": + } + +If the embedded message type is well-known and has a custom JSON +representation, that representation will be embedded adding a field +`value` which holds the custom JSON in addition to the `@type` +field. Example (for message [google.protobuf.Duration][]): + + { + "@type": "type.googleapis.com/google.protobuf.Duration", + "value": "1.212s" + } +*/ +export interface ProtobufAny { + /** + * A URL/resource name that uniquely identifies the type of the serialized + * protocol buffer message. This string must contain at least + * one "/" character. The last segment of the URL's path must represent + * the fully qualified name of the type (as in + * `path/google.protobuf.Duration`). The name should be in a canonical form + * (e.g., leading "." is not accepted). + * + * In practice, teams usually precompile into the binary all types that they + * expect it to use in the context of Any. However, for URLs which use the + * scheme `http`, `https`, or no scheme, one can optionally set up a type + * server that maps type URLs to message definitions as follows: + * * If no scheme is provided, `https` is assumed. + * * An HTTP GET on the URL must yield a [google.protobuf.Type][] + * value in binary format, or produce an error. 
+ * * Applications are allowed to cache lookup results based on the + * URL, or have them precompiled into a binary to avoid any + * lookup. Therefore, binary compatibility needs to be preserved + * on changes to types. (Use versioned type names to manage + * breaking changes.) + * Note: this functionality is not currently available in the official + * protobuf release, and it is not used for type URLs beginning with + * type.googleapis.com. + * Schemes other than `http`, `https` (or the empty scheme) might be + * used with implementation specific semantics. + */ + "@type"?: string; +} + +export interface RpcStatus { + /** @format int32 */ + code?: number; + message?: string; + details?: ProtobufAny[]; +} + +/** +* BaseAccount defines a base account type. It contains all the necessary fields +for basic account functionality. Any custom account type should extend this +type for additional functionality (e.g. vesting). +*/ +export interface V1Beta1BaseAccount { + address?: string; + + /** + * `Any` contains an arbitrary serialized protocol buffer message along with a + * URL that describes the type of the serialized message. + * + * Protobuf library provides support to pack/unpack Any values in the form + * of utility functions or additional generated methods of the Any type. + * Example 1: Pack and unpack a message in C++. + * Foo foo = ...; + * Any any; + * any.PackFrom(foo); + * ... + * if (any.UnpackTo(&foo)) { + * ... + * } + * Example 2: Pack and unpack a message in Java. + * Any any = Any.pack(foo); + * if (any.is(Foo.class)) { + * foo = any.unpack(Foo.class); + * Example 3: Pack and unpack a message in Python. + * foo = Foo(...) + * any = Any() + * any.Pack(foo) + * if any.Is(Foo.DESCRIPTOR): + * any.Unpack(foo) + * Example 4: Pack and unpack a message in Go + * foo := &pb.Foo{...} + * any, err := anypb.New(foo) + * if err != nil { + * ... + * } + * ... 
+ * foo := &pb.Foo{} + * if err := any.UnmarshalTo(foo); err != nil { + * The pack methods provided by protobuf library will by default use + * 'type.googleapis.com/full.type.name' as the type URL and the unpack + * methods only use the fully qualified type name after the last '/' + * in the type URL, for example "foo.bar.com/x/y.z" will yield type + * name "y.z". + * JSON + * ==== + * The JSON representation of an `Any` value uses the regular + * representation of the deserialized, embedded message, with an + * additional field `@type` which contains the type URL. Example: + * package google.profile; + * message Person { + * string first_name = 1; + * string last_name = 2; + * { + * "@type": "type.googleapis.com/google.profile.Person", + * "firstName": , + * "lastName": + * If the embedded message type is well-known and has a custom JSON + * representation, that representation will be embedded adding a field + * `value` which holds the custom JSON in addition to the `@type` + * field. Example (for message [google.protobuf.Duration][]): + * "@type": "type.googleapis.com/google.protobuf.Duration", + * "value": "1.212s" + */ + pub_key?: ProtobufAny; + + /** @format uint64 */ + account_number?: string; + + /** @format uint64 */ + sequence?: string; +} + +/** +* message SomeRequest { + Foo some_parameter = 1; + PageRequest pagination = 2; + } +*/ +export interface V1Beta1PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. + * @format byte + */ + key?: string; + + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. + * @format uint64 + */ + offset?: string; + + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. 
+ * @format uint64 + */ + limit?: string; + + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. + */ + count_total?: boolean; + + /** + * reverse is set to true if results are to be returned in the descending order. + * + * Since: cosmos-sdk 0.43 + */ + reverse?: boolean; +} + +/** +* PageResponse is to be embedded in gRPC response messages where the +corresponding request message has used PageRequest. + + message SomeResponse { + repeated Bar results = 1; + PageResponse page = 2; + } +*/ +export interface V1Beta1PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. + * @format byte + */ + next_key?: string; + + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + * @format uint64 + */ + total?: string; +} + +import axios, { AxiosInstance, AxiosRequestConfig, AxiosResponse, ResponseType } from "axios"; + +export type QueryParamsType = Record; + +export interface FullRequestParams extends Omit { + /** set parameter to `true` for call `securityWorker` for this request */ + secure?: boolean; + /** request path */ + path: string; + /** content type of request body */ + type?: ContentType; + /** query params */ + query?: QueryParamsType; + /** format of response (i.e. 
response.json() -> format: "json") */ + format?: ResponseType; + /** request body */ + body?: unknown; +} + +export type RequestParams = Omit; + +export interface ApiConfig extends Omit { + securityWorker?: ( + securityData: SecurityDataType | null, + ) => Promise | AxiosRequestConfig | void; + secure?: boolean; + format?: ResponseType; +} + +export enum ContentType { + Json = "application/json", + FormData = "multipart/form-data", + UrlEncoded = "application/x-www-form-urlencoded", +} + +export class HttpClient { + public instance: AxiosInstance; + private securityData: SecurityDataType | null = null; + private securityWorker?: ApiConfig["securityWorker"]; + private secure?: boolean; + private format?: ResponseType; + + constructor({ securityWorker, secure, format, ...axiosConfig }: ApiConfig = {}) { + this.instance = axios.create({ ...axiosConfig, baseURL: axiosConfig.baseURL || "" }); + this.secure = secure; + this.format = format; + this.securityWorker = securityWorker; + } + + public setSecurityData = (data: SecurityDataType | null) => { + this.securityData = data; + }; + + private mergeRequestParams(params1: AxiosRequestConfig, params2?: AxiosRequestConfig): AxiosRequestConfig { + return { + ...this.instance.defaults, + ...params1, + ...(params2 || {}), + headers: { + ...(this.instance.defaults.headers || {}), + ...(params1.headers || {}), + ...((params2 && params2.headers) || {}), + }, + }; + } + + private createFormData(input: Record): FormData { + return Object.keys(input || {}).reduce((formData, key) => { + const property = input[key]; + formData.append( + key, + property instanceof Blob + ? property + : typeof property === "object" && property !== null + ? JSON.stringify(property) + : `${property}`, + ); + return formData; + }, new FormData()); + } + + public request = async ({ + secure, + path, + type, + query, + format, + body, + ...params + }: FullRequestParams): Promise> => { + const secureParams = + ((typeof secure === "boolean" ? 
secure : this.secure) && + this.securityWorker && + (await this.securityWorker(this.securityData))) || + {}; + const requestParams = this.mergeRequestParams(params, secureParams); + const responseFormat = (format && this.format) || void 0; + + if (type === ContentType.FormData && body && body !== null && typeof body === "object") { + requestParams.headers.common = { Accept: "*/*" }; + requestParams.headers.post = {}; + requestParams.headers.put = {}; + + body = this.createFormData(body as Record); + } + + return this.instance.request({ + ...requestParams, + headers: { + ...(type && type !== ContentType.FormData ? { "Content-Type": type } : {}), + ...(requestParams.headers || {}), + }, + params: query, + responseType: responseFormat, + data: body, + url: path, + }); + }; +} + +/** + * @title zigbeealliance/distributedcomplianceledger/dclauth/account.proto + * @version version not set + */ +export class Api extends HttpClient { + /** + * No description + * + * @tags Query + * @name QueryAccountAll + * @summary Queries a list of account items. + * @request GET:/dcl/auth/accounts + */ + queryAccountAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/auth/accounts`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryAccountStat + * @summary Queries a accountStat by index. + * @request GET:/dcl/auth/accounts/stat + */ + queryAccountStat = (params: RequestParams = {}) => + this.request({ + path: `/dcl/auth/accounts/stat`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryAccount + * @summary Queries a account by index. 
+ * @request GET:/dcl/auth/accounts/{address} + */ + queryAccount = (address: string, params: RequestParams = {}) => + this.request({ + path: `/dcl/auth/accounts/${address}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryPendingAccountAll + * @summary Queries a list of pendingAccount items. + * @request GET:/dcl/auth/proposed-accounts + */ + queryPendingAccountAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/auth/proposed-accounts`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryPendingAccount + * @summary Queries a pendingAccount by index. + * @request GET:/dcl/auth/proposed-accounts/{address} + */ + queryPendingAccount = (address: string, params: RequestParams = {}) => + this.request({ + path: `/dcl/auth/proposed-accounts/${address}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryPendingAccountRevocationAll + * @summary Queries a list of pendingAccountRevocation items. + * @request GET:/dcl/auth/proposed-revocation-accounts + */ + queryPendingAccountRevocationAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/auth/proposed-revocation-accounts`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryPendingAccountRevocation + * @summary Queries a pendingAccountRevocation by index. 
+ * @request GET:/dcl/auth/proposed-revocation-accounts/{address} + */ + queryPendingAccountRevocation = (address: string, params: RequestParams = {}) => + this.request({ + path: `/dcl/auth/proposed-revocation-accounts/${address}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryRejectedAccountAll + * @summary Queries a list of RejectedAccount items. + * @request GET:/dcl/auth/rejected-accounts + */ + queryRejectedAccountAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/auth/rejected-accounts`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryRejectedAccount + * @summary Queries a RejectedAccount by index. + * @request GET:/dcl/auth/rejected-accounts/{address} + */ + queryRejectedAccount = (address: string, params: RequestParams = {}) => + this.request({ + path: `/dcl/auth/rejected-accounts/${address}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryRevokedAccountAll + * @summary Queries a list of RevokedAccount items. + * @request GET:/dcl/auth/revoked-accounts + */ + queryRevokedAccountAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/auth/revoked-accounts`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryRevokedAccount + * @summary Queries a RevokedAccount by index. 
+ * @request GET:/dcl/auth/revoked-accounts/{address} + */ + queryRevokedAccount = (address: string, params: RequestParams = {}) => + this.request({ + path: `/dcl/auth/revoked-accounts/${address}`, + method: "GET", + format: "json", + ...params, + }); +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types.ts new file mode 100755 index 000000000..cff20ec6d --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types.ts @@ -0,0 +1,19 @@ +import { Account } from "./types/zigbeealliance/distributedcomplianceledger/dclauth/account" +import { AccountStat } from "./types/zigbeealliance/distributedcomplianceledger/dclauth/account_stat" +import { Grant } from "./types/zigbeealliance/distributedcomplianceledger/dclauth/grant" +import { PendingAccount } from "./types/zigbeealliance/distributedcomplianceledger/dclauth/pending_account" +import { PendingAccountRevocation } from "./types/zigbeealliance/distributedcomplianceledger/dclauth/pending_account_revocation" +import { RejectedAccount } from "./types/zigbeealliance/distributedcomplianceledger/dclauth/rejected_account" +import { RevokedAccount } from "./types/zigbeealliance/distributedcomplianceledger/dclauth/revoked_account" + + +export { + Account, + AccountStat, + Grant, + PendingAccount, + PendingAccountRevocation, + RejectedAccount, + RevokedAccount, + + } \ No newline at end of file diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/amino/amino.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/amino/amino.ts new file mode 100644 index 000000000..4b67dcfe4 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/amino/amino.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "amino"; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/cosmos/auth/v1beta1/auth.ts 
b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/cosmos/auth/v1beta1/auth.ts new file mode 100644 index 000000000..a37ece488 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/cosmos/auth/v1beta1/auth.ts @@ -0,0 +1,428 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { Any } from "../../../google/protobuf/any"; + +export const protobufPackage = "cosmos.auth.v1beta1"; + +/** + * BaseAccount defines a base account type. It contains all the necessary fields + * for basic account functionality. Any custom account type should extend this + * type for additional functionality (e.g. vesting). + */ +export interface BaseAccount { + address: string; + pubKey: Any | undefined; + accountNumber: number; + sequence: number; +} + +/** ModuleAccount defines an account for modules that holds coins on a pool. */ +export interface ModuleAccount { + baseAccount: BaseAccount | undefined; + name: string; + permissions: string[]; +} + +/** + * ModuleCredential represents a unclaimable pubkey for base accounts controlled by modules. + * + * Since: cosmos-sdk 0.47 + */ +export interface ModuleCredential { + /** module_name is the name of the module used for address derivation (passed into address.Module). */ + moduleName: string; + /** + * derivation_keys is for deriving a module account address (passed into address.Module) + * adding more keys creates sub-account addresses (passed into address.Derive) + */ + derivationKeys: Uint8Array[]; +} + +/** Params defines the parameters for the auth module. 
*/ +export interface Params { + maxMemoCharacters: number; + txSigLimit: number; + txSizeCostPerByte: number; + sigVerifyCostEd25519: number; + sigVerifyCostSecp256k1: number; +} + +function createBaseBaseAccount(): BaseAccount { + return { address: "", pubKey: undefined, accountNumber: 0, sequence: 0 }; +} + +export const BaseAccount = { + encode(message: BaseAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + if (message.pubKey !== undefined) { + Any.encode(message.pubKey, writer.uint32(18).fork()).ldelim(); + } + if (message.accountNumber !== 0) { + writer.uint32(24).uint64(message.accountNumber); + } + if (message.sequence !== 0) { + writer.uint32(32).uint64(message.sequence); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): BaseAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseBaseAccount(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + case 2: + message.pubKey = Any.decode(reader, reader.uint32()); + break; + case 3: + message.accountNumber = longToNumber(reader.uint64() as Long); + break; + case 4: + message.sequence = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): BaseAccount { + return { + address: isSet(object.address) ? String(object.address) : "", + pubKey: isSet(object.pubKey) ? Any.fromJSON(object.pubKey) : undefined, + accountNumber: isSet(object.accountNumber) ? Number(object.accountNumber) : 0, + sequence: isSet(object.sequence) ? 
Number(object.sequence) : 0, + }; + }, + + toJSON(message: BaseAccount): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + message.pubKey !== undefined && (obj.pubKey = message.pubKey ? Any.toJSON(message.pubKey) : undefined); + message.accountNumber !== undefined && (obj.accountNumber = Math.round(message.accountNumber)); + message.sequence !== undefined && (obj.sequence = Math.round(message.sequence)); + return obj; + }, + + fromPartial, I>>(object: I): BaseAccount { + const message = createBaseBaseAccount(); + message.address = object.address ?? ""; + message.pubKey = (object.pubKey !== undefined && object.pubKey !== null) + ? Any.fromPartial(object.pubKey) + : undefined; + message.accountNumber = object.accountNumber ?? 0; + message.sequence = object.sequence ?? 0; + return message; + }, +}; + +function createBaseModuleAccount(): ModuleAccount { + return { baseAccount: undefined, name: "", permissions: [] }; +} + +export const ModuleAccount = { + encode(message: ModuleAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.baseAccount !== undefined) { + BaseAccount.encode(message.baseAccount, writer.uint32(10).fork()).ldelim(); + } + if (message.name !== "") { + writer.uint32(18).string(message.name); + } + for (const v of message.permissions) { + writer.uint32(26).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ModuleAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseModuleAccount(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.baseAccount = BaseAccount.decode(reader, reader.uint32()); + break; + case 2: + message.name = reader.string(); + break; + case 3: + message.permissions.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ModuleAccount { + return { + baseAccount: isSet(object.baseAccount) ? BaseAccount.fromJSON(object.baseAccount) : undefined, + name: isSet(object.name) ? String(object.name) : "", + permissions: Array.isArray(object?.permissions) ? object.permissions.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: ModuleAccount): unknown { + const obj: any = {}; + message.baseAccount !== undefined + && (obj.baseAccount = message.baseAccount ? BaseAccount.toJSON(message.baseAccount) : undefined); + message.name !== undefined && (obj.name = message.name); + if (message.permissions) { + obj.permissions = message.permissions.map((e) => e); + } else { + obj.permissions = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ModuleAccount { + const message = createBaseModuleAccount(); + message.baseAccount = (object.baseAccount !== undefined && object.baseAccount !== null) + ? BaseAccount.fromPartial(object.baseAccount) + : undefined; + message.name = object.name ?? 
""; + message.permissions = object.permissions?.map((e) => e) || []; + return message; + }, +}; + +function createBaseModuleCredential(): ModuleCredential { + return { moduleName: "", derivationKeys: [] }; +} + +export const ModuleCredential = { + encode(message: ModuleCredential, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.moduleName !== "") { + writer.uint32(10).string(message.moduleName); + } + for (const v of message.derivationKeys) { + writer.uint32(18).bytes(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ModuleCredential { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseModuleCredential(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.moduleName = reader.string(); + break; + case 2: + message.derivationKeys.push(reader.bytes()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ModuleCredential { + return { + moduleName: isSet(object.moduleName) ? String(object.moduleName) : "", + derivationKeys: Array.isArray(object?.derivationKeys) + ? object.derivationKeys.map((e: any) => bytesFromBase64(e)) + : [], + }; + }, + + toJSON(message: ModuleCredential): unknown { + const obj: any = {}; + message.moduleName !== undefined && (obj.moduleName = message.moduleName); + if (message.derivationKeys) { + obj.derivationKeys = message.derivationKeys.map((e) => base64FromBytes(e !== undefined ? e : new Uint8Array())); + } else { + obj.derivationKeys = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ModuleCredential { + const message = createBaseModuleCredential(); + message.moduleName = object.moduleName ?? 
""; + message.derivationKeys = object.derivationKeys?.map((e) => e) || []; + return message; + }, +}; + +function createBaseParams(): Params { + return { + maxMemoCharacters: 0, + txSigLimit: 0, + txSizeCostPerByte: 0, + sigVerifyCostEd25519: 0, + sigVerifyCostSecp256k1: 0, + }; +} + +export const Params = { + encode(message: Params, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.maxMemoCharacters !== 0) { + writer.uint32(8).uint64(message.maxMemoCharacters); + } + if (message.txSigLimit !== 0) { + writer.uint32(16).uint64(message.txSigLimit); + } + if (message.txSizeCostPerByte !== 0) { + writer.uint32(24).uint64(message.txSizeCostPerByte); + } + if (message.sigVerifyCostEd25519 !== 0) { + writer.uint32(32).uint64(message.sigVerifyCostEd25519); + } + if (message.sigVerifyCostSecp256k1 !== 0) { + writer.uint32(40).uint64(message.sigVerifyCostSecp256k1); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Params { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseParams(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.maxMemoCharacters = longToNumber(reader.uint64() as Long); + break; + case 2: + message.txSigLimit = longToNumber(reader.uint64() as Long); + break; + case 3: + message.txSizeCostPerByte = longToNumber(reader.uint64() as Long); + break; + case 4: + message.sigVerifyCostEd25519 = longToNumber(reader.uint64() as Long); + break; + case 5: + message.sigVerifyCostSecp256k1 = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Params { + return { + maxMemoCharacters: isSet(object.maxMemoCharacters) ? Number(object.maxMemoCharacters) : 0, + txSigLimit: isSet(object.txSigLimit) ? 
Number(object.txSigLimit) : 0, + txSizeCostPerByte: isSet(object.txSizeCostPerByte) ? Number(object.txSizeCostPerByte) : 0, + sigVerifyCostEd25519: isSet(object.sigVerifyCostEd25519) ? Number(object.sigVerifyCostEd25519) : 0, + sigVerifyCostSecp256k1: isSet(object.sigVerifyCostSecp256k1) ? Number(object.sigVerifyCostSecp256k1) : 0, + }; + }, + + toJSON(message: Params): unknown { + const obj: any = {}; + message.maxMemoCharacters !== undefined && (obj.maxMemoCharacters = Math.round(message.maxMemoCharacters)); + message.txSigLimit !== undefined && (obj.txSigLimit = Math.round(message.txSigLimit)); + message.txSizeCostPerByte !== undefined && (obj.txSizeCostPerByte = Math.round(message.txSizeCostPerByte)); + message.sigVerifyCostEd25519 !== undefined && (obj.sigVerifyCostEd25519 = Math.round(message.sigVerifyCostEd25519)); + message.sigVerifyCostSecp256k1 !== undefined + && (obj.sigVerifyCostSecp256k1 = Math.round(message.sigVerifyCostSecp256k1)); + return obj; + }, + + fromPartial, I>>(object: I): Params { + const message = createBaseParams(); + message.maxMemoCharacters = object.maxMemoCharacters ?? 0; + message.txSigLimit = object.txSigLimit ?? 0; + message.txSizeCostPerByte = object.txSizeCostPerByte ?? 0; + message.sigVerifyCostEd25519 = object.sigVerifyCostEd25519 ?? 0; + message.sigVerifyCostSecp256k1 = object.sigVerifyCostSecp256k1 ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/cosmos/base/query/v1beta1/pagination.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/cosmos/base/query/v1beta1/pagination.ts new file mode 100644 index 000000000..a31ca249d --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/cosmos/base/query/v1beta1/pagination.ts @@ -0,0 +1,286 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos.base.query.v1beta1"; + +/** + * PageRequest is to be embedded in gRPC request messages for efficient + * pagination. Ex: + * + * message SomeRequest { + * Foo some_parameter = 1; + * PageRequest pagination = 2; + * } + */ +export interface PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. + */ + key: Uint8Array; + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. + */ + offset: number; + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. + */ + limit: number; + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. 
+ * count_total is only respected when offset is used. It is ignored when key + * is set. + */ + countTotal: boolean; + /** + * reverse is set to true if results are to be returned in the descending order. + * + * Since: cosmos-sdk 0.43 + */ + reverse: boolean; +} + +/** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ +export interface PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. + */ + nextKey: Uint8Array; + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + */ + total: number; +} + +function createBasePageRequest(): PageRequest { + return { key: new Uint8Array(), offset: 0, limit: 0, countTotal: false, reverse: false }; +} + +export const PageRequest = { + encode(message: PageRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + if (message.offset !== 0) { + writer.uint32(16).uint64(message.offset); + } + if (message.limit !== 0) { + writer.uint32(24).uint64(message.limit); + } + if (message.countTotal === true) { + writer.uint32(32).bool(message.countTotal); + } + if (message.reverse === true) { + writer.uint32(40).bool(message.reverse); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PageRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePageRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + case 2: + message.offset = longToNumber(reader.uint64() as Long); + break; + case 3: + message.limit = longToNumber(reader.uint64() as Long); + break; + case 4: + message.countTotal = reader.bool(); + break; + case 5: + message.reverse = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PageRequest { + return { + key: isSet(object.key) ? bytesFromBase64(object.key) : new Uint8Array(), + offset: isSet(object.offset) ? Number(object.offset) : 0, + limit: isSet(object.limit) ? Number(object.limit) : 0, + countTotal: isSet(object.countTotal) ? Boolean(object.countTotal) : false, + reverse: isSet(object.reverse) ? Boolean(object.reverse) : false, + }; + }, + + toJSON(message: PageRequest): unknown { + const obj: any = {}; + message.key !== undefined + && (obj.key = base64FromBytes(message.key !== undefined ? message.key : new Uint8Array())); + message.offset !== undefined && (obj.offset = Math.round(message.offset)); + message.limit !== undefined && (obj.limit = Math.round(message.limit)); + message.countTotal !== undefined && (obj.countTotal = message.countTotal); + message.reverse !== undefined && (obj.reverse = message.reverse); + return obj; + }, + + fromPartial, I>>(object: I): PageRequest { + const message = createBasePageRequest(); + message.key = object.key ?? new Uint8Array(); + message.offset = object.offset ?? 0; + message.limit = object.limit ?? 0; + message.countTotal = object.countTotal ?? false; + message.reverse = object.reverse ?? 
false; + return message; + }, +}; + +function createBasePageResponse(): PageResponse { + return { nextKey: new Uint8Array(), total: 0 }; +} + +export const PageResponse = { + encode(message: PageResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.nextKey.length !== 0) { + writer.uint32(10).bytes(message.nextKey); + } + if (message.total !== 0) { + writer.uint32(16).uint64(message.total); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PageResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePageResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.nextKey = reader.bytes(); + break; + case 2: + message.total = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PageResponse { + return { + nextKey: isSet(object.nextKey) ? bytesFromBase64(object.nextKey) : new Uint8Array(), + total: isSet(object.total) ? Number(object.total) : 0, + }; + }, + + toJSON(message: PageResponse): unknown { + const obj: any = {}; + message.nextKey !== undefined + && (obj.nextKey = base64FromBytes(message.nextKey !== undefined ? message.nextKey : new Uint8Array())); + message.total !== undefined && (obj.total = Math.round(message.total)); + return obj; + }, + + fromPartial, I>>(object: I): PageResponse { + const message = createBasePageResponse(); + message.nextKey = object.nextKey ?? new Uint8Array(); + message.total = object.total ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/cosmos_proto/cosmos.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/cosmos_proto/cosmos.ts new file mode 100644 index 000000000..79c8d3486 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/cosmos_proto/cosmos.ts @@ -0,0 +1,247 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos_proto"; + +export enum ScalarType { + SCALAR_TYPE_UNSPECIFIED = 0, + SCALAR_TYPE_STRING = 1, + SCALAR_TYPE_BYTES = 2, + UNRECOGNIZED = -1, +} + +export function scalarTypeFromJSON(object: any): ScalarType { + switch (object) { + case 0: + case "SCALAR_TYPE_UNSPECIFIED": + return ScalarType.SCALAR_TYPE_UNSPECIFIED; + case 1: + case "SCALAR_TYPE_STRING": + return ScalarType.SCALAR_TYPE_STRING; + case 2: + case "SCALAR_TYPE_BYTES": + return ScalarType.SCALAR_TYPE_BYTES; + case -1: + case "UNRECOGNIZED": + default: + return ScalarType.UNRECOGNIZED; + } +} + +export function scalarTypeToJSON(object: ScalarType): string { + switch (object) { + case ScalarType.SCALAR_TYPE_UNSPECIFIED: + return "SCALAR_TYPE_UNSPECIFIED"; + case ScalarType.SCALAR_TYPE_STRING: + return "SCALAR_TYPE_STRING"; + case ScalarType.SCALAR_TYPE_BYTES: + return "SCALAR_TYPE_BYTES"; + case ScalarType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * InterfaceDescriptor describes an interface type to be used with + * accepts_interface and implements_interface and declared by 
declare_interface. + */ +export interface InterfaceDescriptor { + /** + * name is the name of the interface. It should be a short-name (without + * a period) such that the fully qualified name of the interface will be + * package.name, ex. for the package a.b and interface named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the interface and its + * purpose. + */ + description: string; +} + +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. + * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. + * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ +export interface ScalarDescriptor { + /** + * name is the name of the scalar. It should be a short-name (without + * a period) such that the fully qualified name of the scalar will be + * package.name, ex. for the package a.b and scalar named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the scalar and its + * encoding format. For instance a big integer or decimal scalar should + * specify precisely the expected encoding format. + */ + description: string; + /** + * field_type is the type of field with which this scalar can be used. + * Scalars can be used with one and only one type of field so that + * encoding standards and simple and clear. Currently only string and + * bytes fields are supported for scalars. 
+ */ + fieldType: ScalarType[]; +} + +function createBaseInterfaceDescriptor(): InterfaceDescriptor { + return { name: "", description: "" }; +} + +export const InterfaceDescriptor = { + encode(message: InterfaceDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseInterfaceDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): InterfaceDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + }; + }, + + toJSON(message: InterfaceDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + return obj; + }, + + fromPartial, I>>(object: I): InterfaceDescriptor { + const message = createBaseInterfaceDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? 
""; + return message; + }, +}; + +function createBaseScalarDescriptor(): ScalarDescriptor { + return { name: "", description: "", fieldType: [] }; +} + +export const ScalarDescriptor = { + encode(message: ScalarDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + writer.uint32(26).fork(); + for (const v of message.fieldType) { + writer.int32(v); + } + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ScalarDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseScalarDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.fieldType.push(reader.int32() as any); + } + } else { + message.fieldType.push(reader.int32() as any); + } + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ScalarDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + fieldType: Array.isArray(object?.fieldType) ? 
object.fieldType.map((e: any) => scalarTypeFromJSON(e)) : [], + }; + }, + + toJSON(message: ScalarDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + if (message.fieldType) { + obj.fieldType = message.fieldType.map((e) => scalarTypeToJSON(e)); + } else { + obj.fieldType = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ScalarDescriptor { + const message = createBaseScalarDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? ""; + message.fieldType = object.fieldType?.map((e) => e) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/gogoproto/gogo.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/gogoproto/gogo.ts new file mode 100644 index 000000000..3f41a047a --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/gogoproto/gogo.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "gogoproto"; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/google/api/annotations.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/google/api/annotations.ts new file mode 100644 index 000000000..aace4787f --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/google/api/annotations.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "google.api"; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/google/api/http.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/google/api/http.ts new file mode 100644 index 000000000..17e770d15 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/google/api/http.ts @@ -0,0 +1,589 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.api"; + +/** + * Defines the HTTP configuration for an API service. It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface Http { + /** + * A list of HTTP configuration rules that apply to individual API methods. + * + * **NOTE:** All service configuration rules follow "last one wins" order. 
+ */ + rules: HttpRule[]; + /** + * When set to true, URL path parmeters will be fully URI-decoded except in + * cases of single segment matches in reserved expansion, where "%2F" will be + * left encoded. + * + * The default behavior is to not decode RFC 6570 reserved characters in multi + * segment matches. + */ + fullyDecodeReservedExpansion: boolean; +} + +/** + * `HttpRule` defines the mapping of an RPC method to one or more HTTP + * REST API methods. The mapping specifies how different portions of the RPC + * request message are mapped to URL path, URL query parameters, and + * HTTP request body. The mapping is typically specified as an + * `google.api.http` annotation on the RPC method, + * see "google/api/annotations.proto" for details. + * + * The mapping consists of a field specifying the path template and + * method kind. The path template can refer to fields in the request + * message, as in the example below which describes a REST GET + * operation on a resource collection of messages: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}/{sub.subfield}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * SubMessage sub = 2; // `sub.subfield` is url-mapped + * } + * message Message { + * string text = 1; // content of the resource + * } + * + * The same http annotation can alternatively be expressed inside the + * `GRPC API Configuration` YAML file. + * + * http: + * rules: + * - selector: .Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * This definition enables an automatic, bidrectional mapping of HTTP + * JSON to RPC. 
Example: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456/foo` | `GetMessage(message_id: "123456" sub: SubMessage(subfield: "foo"))` + * + * In general, not only fields but also field paths can be referenced + * from a path pattern. Fields mapped to the path pattern cannot be + * repeated and must have a primitive (non-message) type. + * + * Any fields in the request message which are not bound by the path + * pattern automatically become (optional) HTTP query + * parameters. Assume the following definition of the request message: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * int64 revision = 2; // becomes a parameter + * SubMessage sub = 3; // `sub.subfield` becomes a parameter + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456?revision=2&sub.subfield=foo` | `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: "foo"))` + * + * Note that fields which are mapped to HTTP parameters must have a + * primitive type or a repeated primitive type. Message types are not + * allowed. In the case of a repeated type, the parameter can be + * repeated in the URL, as in `...?param=A¶m=B`. + * + * For HTTP method kinds which allow a request body, the `body` field + * specifies the mapping. 
Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * The following HTTP JSON to RPC mapping is enabled: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice of + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. 
Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC + * mappings: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` + * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: "123456")` + * + * # Rules for HTTP mapping + * + * The rules for mapping HTTP path, query parameters, and body fields + * to the request message are as follows: + * + * 1. The `body` field specifies either `*` or a field path, or is + * omitted. If omitted, it indicates there is no HTTP request body. + * 2. Leaf fields (recursive expansion of nested messages in the + * request) can be classified into three types: + * (a) Matched in the URL template. + * (b) Covered by body (if body is `*`, everything except (a) fields; + * else everything under the body field) + * (c) All other fields. + * 3. URL query parameters found in the HTTP request are mapped to (c) fields. + * 4. Any body sent with an HTTP request can contain only (b) fields. + * + * The syntax of the path template is as follows: + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single path segment. The syntax `**` matches zero + * or more path segments, which must be the last part of the path except the + * `Verb`. The syntax `LITERAL` matches literal text in the path. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. 
A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path, all characters + * except `[-_.~0-9a-zA-Z]` are percent-encoded. Such variables show up in the + * Discovery Document as `{var}`. + * + * If a variable contains one or more path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path, all + * characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. Such variables + * show up in the Discovery Document as `{+var}`. + * + * NOTE: While the single segment variable matches the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 + * Simple String Expansion, the multi segment variable **does not** match + * RFC 6570 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. + * + * NOTE: the field paths in variables and in the `body` must not refer to + * repeated fields or map fields. + */ +export interface HttpRule { + /** + * Selects methods to which this rule applies. + * + * Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + */ + selector: string; + /** Used for listing and getting information about resources. */ + get: + | string + | undefined; + /** Used for updating a resource. */ + put: + | string + | undefined; + /** Used for creating a resource. */ + post: + | string + | undefined; + /** Used for deleting a resource. */ + delete: + | string + | undefined; + /** Used for updating a resource. 
*/ + patch: + | string + | undefined; + /** + * The custom pattern is used for specifying an HTTP method that is not + * included in the `pattern` field, such as HEAD, or "*" to leave the + * HTTP method unspecified for this rule. The wild-card rule is useful + * for services that provide content to Web (HTML) clients. + */ + custom: + | CustomHttpPattern + | undefined; + /** + * The name of the request field whose value is mapped to the HTTP body, or + * `*` for mapping all fields not captured by the path pattern to the HTTP + * body. NOTE: the referred field must not be a repeated field and must be + * present at the top-level of request message type. + */ + body: string; + /** + * Optional. The name of the response field whose value is mapped to the HTTP + * body of response. Other response fields are ignored. When + * not set, the response message will be used as HTTP body of response. + */ + responseBody: string; + /** + * Additional HTTP bindings for the selector. Nested bindings must + * not contain an `additional_bindings` field themselves (that is, + * the nesting may only be one level deep). + */ + additionalBindings: HttpRule[]; +} + +/** A custom pattern is used for defining custom HTTP verb. */ +export interface CustomHttpPattern { + /** The name of this custom HTTP verb. */ + kind: string; + /** The path matched by this custom verb. */ + path: string; +} + +function createBaseHttp(): Http { + return { rules: [], fullyDecodeReservedExpansion: false }; +} + +export const Http = { + encode(message: Http, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rules) { + HttpRule.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.fullyDecodeReservedExpansion === true) { + writer.uint32(16).bool(message.fullyDecodeReservedExpansion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Http { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rules.push(HttpRule.decode(reader, reader.uint32())); + break; + case 2: + message.fullyDecodeReservedExpansion = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Http { + return { + rules: Array.isArray(object?.rules) ? object.rules.map((e: any) => HttpRule.fromJSON(e)) : [], + fullyDecodeReservedExpansion: isSet(object.fullyDecodeReservedExpansion) + ? Boolean(object.fullyDecodeReservedExpansion) + : false, + }; + }, + + toJSON(message: Http): unknown { + const obj: any = {}; + if (message.rules) { + obj.rules = message.rules.map((e) => e ? HttpRule.toJSON(e) : undefined); + } else { + obj.rules = []; + } + message.fullyDecodeReservedExpansion !== undefined + && (obj.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion); + return obj; + }, + + fromPartial, I>>(object: I): Http { + const message = createBaseHttp(); + message.rules = object.rules?.map((e) => HttpRule.fromPartial(e)) || []; + message.fullyDecodeReservedExpansion = object.fullyDecodeReservedExpansion ?? 
false; + return message; + }, +}; + +function createBaseHttpRule(): HttpRule { + return { + selector: "", + get: undefined, + put: undefined, + post: undefined, + delete: undefined, + patch: undefined, + custom: undefined, + body: "", + responseBody: "", + additionalBindings: [], + }; +} + +export const HttpRule = { + encode(message: HttpRule, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.selector !== "") { + writer.uint32(10).string(message.selector); + } + if (message.get !== undefined) { + writer.uint32(18).string(message.get); + } + if (message.put !== undefined) { + writer.uint32(26).string(message.put); + } + if (message.post !== undefined) { + writer.uint32(34).string(message.post); + } + if (message.delete !== undefined) { + writer.uint32(42).string(message.delete); + } + if (message.patch !== undefined) { + writer.uint32(50).string(message.patch); + } + if (message.custom !== undefined) { + CustomHttpPattern.encode(message.custom, writer.uint32(66).fork()).ldelim(); + } + if (message.body !== "") { + writer.uint32(58).string(message.body); + } + if (message.responseBody !== "") { + writer.uint32(98).string(message.responseBody); + } + for (const v of message.additionalBindings) { + HttpRule.encode(v!, writer.uint32(90).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): HttpRule { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseHttpRule(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.selector = reader.string(); + break; + case 2: + message.get = reader.string(); + break; + case 3: + message.put = reader.string(); + break; + case 4: + message.post = reader.string(); + break; + case 5: + message.delete = reader.string(); + break; + case 6: + message.patch = reader.string(); + break; + case 8: + message.custom = CustomHttpPattern.decode(reader, reader.uint32()); + break; + case 7: + message.body = reader.string(); + break; + case 12: + message.responseBody = reader.string(); + break; + case 11: + message.additionalBindings.push(HttpRule.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): HttpRule { + return { + selector: isSet(object.selector) ? String(object.selector) : "", + get: isSet(object.get) ? String(object.get) : undefined, + put: isSet(object.put) ? String(object.put) : undefined, + post: isSet(object.post) ? String(object.post) : undefined, + delete: isSet(object.delete) ? String(object.delete) : undefined, + patch: isSet(object.patch) ? String(object.patch) : undefined, + custom: isSet(object.custom) ? CustomHttpPattern.fromJSON(object.custom) : undefined, + body: isSet(object.body) ? String(object.body) : "", + responseBody: isSet(object.responseBody) ? String(object.responseBody) : "", + additionalBindings: Array.isArray(object?.additionalBindings) + ? 
object.additionalBindings.map((e: any) => HttpRule.fromJSON(e)) + : [], + }; + }, + + toJSON(message: HttpRule): unknown { + const obj: any = {}; + message.selector !== undefined && (obj.selector = message.selector); + message.get !== undefined && (obj.get = message.get); + message.put !== undefined && (obj.put = message.put); + message.post !== undefined && (obj.post = message.post); + message.delete !== undefined && (obj.delete = message.delete); + message.patch !== undefined && (obj.patch = message.patch); + message.custom !== undefined + && (obj.custom = message.custom ? CustomHttpPattern.toJSON(message.custom) : undefined); + message.body !== undefined && (obj.body = message.body); + message.responseBody !== undefined && (obj.responseBody = message.responseBody); + if (message.additionalBindings) { + obj.additionalBindings = message.additionalBindings.map((e) => e ? HttpRule.toJSON(e) : undefined); + } else { + obj.additionalBindings = []; + } + return obj; + }, + + fromPartial, I>>(object: I): HttpRule { + const message = createBaseHttpRule(); + message.selector = object.selector ?? ""; + message.get = object.get ?? undefined; + message.put = object.put ?? undefined; + message.post = object.post ?? undefined; + message.delete = object.delete ?? undefined; + message.patch = object.patch ?? undefined; + message.custom = (object.custom !== undefined && object.custom !== null) + ? CustomHttpPattern.fromPartial(object.custom) + : undefined; + message.body = object.body ?? ""; + message.responseBody = object.responseBody ?? 
""; + message.additionalBindings = object.additionalBindings?.map((e) => HttpRule.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseCustomHttpPattern(): CustomHttpPattern { + return { kind: "", path: "" }; +} + +export const CustomHttpPattern = { + encode(message: CustomHttpPattern, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.kind !== "") { + writer.uint32(10).string(message.kind); + } + if (message.path !== "") { + writer.uint32(18).string(message.path); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CustomHttpPattern { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCustomHttpPattern(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.kind = reader.string(); + break; + case 2: + message.path = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): CustomHttpPattern { + return { kind: isSet(object.kind) ? String(object.kind) : "", path: isSet(object.path) ? String(object.path) : "" }; + }, + + toJSON(message: CustomHttpPattern): unknown { + const obj: any = {}; + message.kind !== undefined && (obj.kind = message.kind); + message.path !== undefined && (obj.path = message.path); + return obj; + }, + + fromPartial, I>>(object: I): CustomHttpPattern { + const message = createBaseCustomHttpPattern(); + message.kind = object.kind ?? ""; + message.path = object.path ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? 
keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/google/protobuf/any.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/google/protobuf/any.ts new file mode 100644 index 000000000..c4ebf38be --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/google/protobuf/any.ts @@ -0,0 +1,240 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * `Any` contains an arbitrary serialized protocol buffer message along with a + * URL that describes the type of the serialized message. + * + * Protobuf library provides support to pack/unpack Any values in the form + * of utility functions or additional generated methods of the Any type. + * + * Example 1: Pack and unpack a message in C++. + * + * Foo foo = ...; + * Any any; + * any.PackFrom(foo); + * ... + * if (any.UnpackTo(&foo)) { + * ... + * } + * + * Example 2: Pack and unpack a message in Java. + * + * Foo foo = ...; + * Any any = Any.pack(foo); + * ... + * if (any.is(Foo.class)) { + * foo = any.unpack(Foo.class); + * } + * + * Example 3: Pack and unpack a message in Python. + * + * foo = Foo(...) + * any = Any() + * any.Pack(foo) + * ... + * if any.Is(Foo.DESCRIPTOR): + * any.Unpack(foo) + * ... + * + * Example 4: Pack and unpack a message in Go + * + * foo := &pb.Foo{...} + * any, err := anypb.New(foo) + * if err != nil { + * ... + * } + * ... + * foo := &pb.Foo{} + * if err := any.UnmarshalTo(foo); err != nil { + * ... 
+ * } + * + * The pack methods provided by protobuf library will by default use + * 'type.googleapis.com/full.type.name' as the type URL and the unpack + * methods only use the fully qualified type name after the last '/' + * in the type URL, for example "foo.bar.com/x/y.z" will yield type + * name "y.z". + * + * JSON + * ==== + * The JSON representation of an `Any` value uses the regular + * representation of the deserialized, embedded message, with an + * additional field `@type` which contains the type URL. Example: + * + * package google.profile; + * message Person { + * string first_name = 1; + * string last_name = 2; + * } + * + * { + * "@type": "type.googleapis.com/google.profile.Person", + * "firstName": , + * "lastName": + * } + * + * If the embedded message type is well-known and has a custom JSON + * representation, that representation will be embedded adding a field + * `value` which holds the custom JSON in addition to the `@type` + * field. Example (for message [google.protobuf.Duration][]): + * + * { + * "@type": "type.googleapis.com/google.protobuf.Duration", + * "value": "1.212s" + * } + */ +export interface Any { + /** + * A URL/resource name that uniquely identifies the type of the serialized + * protocol buffer message. This string must contain at least + * one "/" character. The last segment of the URL's path must represent + * the fully qualified name of the type (as in + * `path/google.protobuf.Duration`). The name should be in a canonical form + * (e.g., leading "." is not accepted). + * + * In practice, teams usually precompile into the binary all types that they + * expect it to use in the context of Any. However, for URLs which use the + * scheme `http`, `https`, or no scheme, one can optionally set up a type + * server that maps type URLs to message definitions as follows: + * + * * If no scheme is provided, `https` is assumed. 
+ * * An HTTP GET on the URL must yield a [google.protobuf.Type][] + * value in binary format, or produce an error. + * * Applications are allowed to cache lookup results based on the + * URL, or have them precompiled into a binary to avoid any + * lookup. Therefore, binary compatibility needs to be preserved + * on changes to types. (Use versioned type names to manage + * breaking changes.) + * + * Note: this functionality is not currently available in the official + * protobuf release, and it is not used for type URLs beginning with + * type.googleapis.com. + * + * Schemes other than `http`, `https` (or the empty scheme) might be + * used with implementation specific semantics. + */ + typeUrl: string; + /** Must be a valid serialized protocol buffer of the above specified type. */ + value: Uint8Array; +} + +function createBaseAny(): Any { + return { typeUrl: "", value: new Uint8Array() }; +} + +export const Any = { + encode(message: Any, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.typeUrl !== "") { + writer.uint32(10).string(message.typeUrl); + } + if (message.value.length !== 0) { + writer.uint32(18).bytes(message.value); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Any { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAny(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.typeUrl = reader.string(); + break; + case 2: + message.value = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Any { + return { + typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : "", + value: isSet(object.value) ? 
bytesFromBase64(object.value) : new Uint8Array(), + }; + }, + + toJSON(message: Any): unknown { + const obj: any = {}; + message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl); + message.value !== undefined + && (obj.value = base64FromBytes(message.value !== undefined ? message.value : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): Any { + const message = createBaseAny(); + message.typeUrl = object.typeUrl ?? ""; + message.value = object.value ?? new Uint8Array(); + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/google/protobuf/descriptor.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/google/protobuf/descriptor.ts new file mode 100644 index 000000000..8fb7bb24c --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/google/protobuf/descriptor.ts @@ -0,0 +1,3753 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSet { + file: FileDescriptorProto[]; +} + +/** Describes a complete .proto file. */ +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name: string; + /** e.g. "foo", "foo.bar", etc. */ + package: string; + /** Names of files imported by this file. */ + dependency: string[]; + /** Indexes of the public imported files in the dependency list above. */ + publicDependency: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weakDependency: number[]; + /** All top-level definitions in this file. */ + messageType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + service: ServiceDescriptorProto[]; + extension: FieldDescriptorProto[]; + options: + | FileOptions + | undefined; + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. + */ + sourceCodeInfo: + | SourceCodeInfo + | undefined; + /** + * The syntax of the proto file. 
+ * The supported values are "proto2" and "proto3". + */ + syntax: string; +} + +/** Describes a message type. */ +export interface DescriptorProto { + name: string; + field: FieldDescriptorProto[]; + extension: FieldDescriptorProto[]; + nestedType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + extensionRange: DescriptorProto_ExtensionRange[]; + oneofDecl: OneofDescriptorProto[]; + options: MessageOptions | undefined; + reservedRange: DescriptorProto_ReservedRange[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reservedName: string[]; +} + +export interface DescriptorProto_ExtensionRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; + options: ExtensionRangeOptions | undefined; +} + +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; +} + +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Describes a field within a message. */ +export interface FieldDescriptorProto { + name: string; + number: number; + label: FieldDescriptorProto_Label; + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type: FieldDescriptorProto_Type; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). 
+ */ + typeName: string; + /** + * For extensions, this is the name of the type being extended. It is + * resolved in the same manner as type_name. + */ + extendee: string; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + * TODO(kenton): Base-64 encode? + */ + defaultValue: string; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneofIndex: number; + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + jsonName: string; + options: + | FieldOptions + | undefined; + /** + * If true, this is a proto3 "optional". When a proto3 field is optional, it + * tracks presence regardless of field type. + * + * When proto3_optional is true, this field must be belong to a oneof to + * signal to old proto3 clients that presence is tracked for this field. This + * oneof is known as a "synthetic" oneof, and this field must be its sole + * member (each proto3 optional field gets its own synthetic oneof). Synthetic + * oneofs exist in the descriptor only, and do not generate any API. Synthetic + * oneofs must be ordered after all "real" oneofs. + * + * For message fields, proto3_optional doesn't create any semantic change, + * since non-repeated message fields always track presence. However it still + * indicates the semantic detail of whether the user wrote "optional" or not. + * This can be useful for round-tripping the .proto file. For consistency we + * give message fields a synthetic oneof also, even though it is not required + * to track presence. 
This is especially important because the parser can't + * tell if a field is a message or an enum, so it must always create a + * synthetic oneof. + * + * Proto2 optional fields do not set this flag, because they already indicate + * optional with `LABEL_OPTIONAL`. + */ + proto3Optional: boolean; +} + +export enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + TYPE_GROUP = 10, + /** TYPE_MESSAGE - Length-delimited aggregate. */ + TYPE_MESSAGE = 11, + /** TYPE_BYTES - New in version 2. */ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + /** TYPE_SINT64 - Uses ZigZag encoding. 
*/ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + return FieldDescriptorProto_Type.TYPE_SINT64; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Type.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case 
FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + case FieldDescriptorProto_Type.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Label.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string { + switch (object) { + case 
FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + case FieldDescriptorProto_Label.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** Describes a oneof. */ +export interface OneofDescriptorProto { + name: string; + options: OneofOptions | undefined; +} + +/** Describes an enum type. */ +export interface EnumDescriptorProto { + name: string; + value: EnumValueDescriptorProto[]; + options: + | EnumOptions + | undefined; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reservedRange: EnumDescriptorProto_EnumReservedRange[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reservedName: string[]; +} + +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start: number; + /** Inclusive. */ + end: number; +} + +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProto { + name: string; + number: number; + options: EnumValueOptions | undefined; +} + +/** Describes a service. */ +export interface ServiceDescriptorProto { + name: string; + method: MethodDescriptorProto[]; + options: ServiceOptions | undefined; +} + +/** Describes a method of a service. */ +export interface MethodDescriptorProto { + name: string; + /** + * Input and output type names. 
These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + inputType: string; + outputType: string; + options: + | MethodOptions + | undefined; + /** Identifies if client streams multiple client messages */ + clientStreaming: boolean; + /** Identifies if server streams multiple server messages */ + serverStreaming: boolean; +} + +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage: string; + /** + * Controls the name of the wrapper Java class generated for the .proto file. + * That class will always contain the .proto file's getDescriptor() method as + * well as any top-level extensions defined in the .proto file. + * If java_multiple_files is disabled, then all the other classes from the + * .proto file will be nested inside the single wrapper outer class. + */ + javaOuterClassname: string; + /** + * If enabled, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the wrapper class + * named by java_outer_classname. However, the wrapper class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + javaMultipleFiles: boolean; + /** + * This option does nothing. + * + * @deprecated + */ + javaGenerateEqualsAndHash: boolean; + /** + * If set true, then the Java2 code generator will generate code that + * throws an exception whenever an attempt is made to assign a non-UTF-8 + * byte sequence to a string field. + * Message reflection will do the same. + * However, an extension field still accepts non-UTF-8 byte sequences. 
+ * This option has no effect on when used with the lite runtime. + */ + javaStringCheckUtf8: boolean; + optimizeFor: FileOptions_OptimizeMode; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + goPackage: string; + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + ccGenericServices: boolean; + javaGenericServices: boolean; + pyGenericServices: boolean; + phpGenericServices: boolean; + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated: boolean; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + ccEnableArenas: boolean; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. + */ + objcClassPrefix: string; + /** Namespace for generated classes; defaults to the package. 
*/ + csharpNamespace: string; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swiftPrefix: string; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + phpClassPrefix: string; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. When this option is empty, the package name will be used for + * determining the namespace. + */ + phpNamespace: string; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + phpMetadataNamespace: string; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + rubyPackage: string; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Generated classes can be optimized for speed or code size. */ +export enum FileOptions_OptimizeMode { + /** SPEED - Generate complete code for parsing, serialization, */ + SPEED = 1, + /** CODE_SIZE - etc. */ + CODE_SIZE = 2, + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. 
*/ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1, +} + +export function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + case -1: + case "UNRECOGNIZED": + default: + return FileOptions_OptimizeMode.UNRECOGNIZED; + } +} + +export function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + case FileOptions_OptimizeMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + messageSetWireFormat: boolean; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". 
+ */ + noStandardDescriptorAccessor: boolean; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated: boolean; + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map<KeyType, ValueType> map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + mapEntry: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface FieldOptions { + /** + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is not yet implemented in the open source + * release -- sorry, we'll try to include it in a future version! + */ + ctype: FieldOptions_CType; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. 
+ */ + packed: boolean; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. + * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype: FieldOptions_JSType; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * Note that implementations may choose not to check required fields within + * a lazy sub-message. That is, calling IsInitialized() on the outer message + * may return true even if the inner message has missing required fields. 
+ * This is necessary because otherwise the inner message would have to be + * parsed in order to perform the check, defeating the purpose of lazy + * parsing. An implementation which chooses not to check required fields + * must be consistent about it. That is, for any particular sub-message, the + * implementation must either *always* check its required fields, or *never* + * check its required fields, regardless of whether or not the message has + * been parsed. + */ + lazy: boolean; + /** + * Is this field deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated: boolean; + /** For Google-internal migration only. Do not use. */ + weak: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export enum FieldOptions_CType { + /** STRING - Default mode. */ + STRING = 0, + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_CType.UNRECOGNIZED; + } +} + +export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + case FieldOptions_CType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. */ + JS_NORMAL = 0, + /** JS_STRING - Use JavaScript strings. 
*/ + JS_STRING = 1, + /** JS_NUMBER - Use JavaScript numbers. */ + JS_NUMBER = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_JSType.UNRECOGNIZED; + } +} + +export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + case FieldOptions_JSType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface OneofOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias: boolean; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumValueOptions { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. 
*/ + uninterpretedOption: UninterpretedOption[]; +} + +export interface ServiceOptions { + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated: boolean; + idempotencyLevel: MethodOptions_IdempotencyLevel; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
+ */ +export enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1, +} + +export function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + case -1: + case "UNRECOGNIZED": + default: + return MethodOptions_IdempotencyLevel.UNRECOGNIZED; + } +} + +export function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + case MethodOptions_IdempotencyLevel.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. 
+ */ + identifierValue: string; + positiveIntValue: number; + negativeIntValue: number; + doubleValue: number; + stringValue: Uint8Array; + aggregateValue: string; +} + +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePart { + namePart: string; + isExtension: boolean; +} + +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. 
This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[]; +} + +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition. For + * example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). 
+ */ + path: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. + * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to qux. + * // + * // Another line attached to qux. + * optional double qux = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to qux or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. * / + * /* Block comment attached to + * * grault. 
* / + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leadingComments: string; + trailingComments: string; + leadingDetachedComments: string[]; +} + +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. + */ + annotation: GeneratedCodeInfo_Annotation[]; +} + +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[]; + /** Identifies the filesystem path to the original source .proto. */ + sourceFile: string; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin: number; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified offset. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end: number; +} + +function createBaseFileDescriptorSet(): FileDescriptorSet { + return { file: [] }; +} + +export const FileDescriptorSet = { + encode(message: FileDescriptorSet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.file) { + FileDescriptorProto.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorSet { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.file.push(FileDescriptorProto.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorSet { + return { file: Array.isArray(object?.file) ? object.file.map((e: any) => FileDescriptorProto.fromJSON(e)) : [] }; + }, + + toJSON(message: FileDescriptorSet): unknown { + const obj: any = {}; + if (message.file) { + obj.file = message.file.map((e) => e ? FileDescriptorProto.toJSON(e) : undefined); + } else { + obj.file = []; + } + return obj; + }, + + fromPartial<I extends Exact<DeepPartial<FileDescriptorSet>, I>>(object: I): FileDescriptorSet { + const message = createBaseFileDescriptorSet(); + message.file = object.file?.map((e) => FileDescriptorProto.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFileDescriptorProto(): FileDescriptorProto { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + }; +} + +export const FileDescriptorProto = { + encode(message: FileDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.package !== "") { + writer.uint32(18).string(message.package); + } + for (const v of message.dependency) { + writer.uint32(26).string(v!); + } + writer.uint32(82).fork(); + for (const v of message.publicDependency) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(90).fork(); + for (const v of message.weakDependency) { + writer.int32(v); + } + writer.ldelim(); + for (const v of message.messageType) { + DescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of 
message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const v of message.service) { + ServiceDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(58).fork()).ldelim(); + } + if (message.options !== undefined) { + FileOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.sourceCodeInfo !== undefined) { + SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(74).fork()).ldelim(); + } + if (message.syntax !== "") { + writer.uint32(98).string(message.syntax); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFileDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.package = reader.string(); + break; + case 3: + message.dependency.push(reader.string()); + break; + case 10: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.publicDependency.push(reader.int32()); + } + } else { + message.publicDependency.push(reader.int32()); + } + break; + case 11: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.weakDependency.push(reader.int32()); + } + } else { + message.weakDependency.push(reader.int32()); + } + break; + case 4: + message.messageType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.service.push(ServiceDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + 
message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 8: + message.options = FileOptions.decode(reader, reader.uint32()); + break; + case 9: + message.sourceCodeInfo = SourceCodeInfo.decode(reader, reader.uint32()); + break; + case 12: + message.syntax = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + package: isSet(object.package) ? String(object.package) : "", + dependency: Array.isArray(object?.dependency) ? object.dependency.map((e: any) => String(e)) : [], + publicDependency: Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e: any) => Number(e)) + : [], + weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e: any) => Number(e)) : [], + messageType: Array.isArray(object?.messageType) + ? object.messageType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + service: Array.isArray(object?.service) ? object.service.map((e: any) => ServiceDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) ? SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, + syntax: isSet(object.syntax) ? 
String(object.syntax) : "", + }; + }, + + toJSON(message: FileDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.package !== undefined && (obj.package = message.package); + if (message.dependency) { + obj.dependency = message.dependency.map((e) => e); + } else { + obj.dependency = []; + } + if (message.publicDependency) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } else { + obj.publicDependency = []; + } + if (message.weakDependency) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } else { + obj.weakDependency = []; + } + if (message.messageType) { + obj.messageType = message.messageType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.messageType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.service) { + obj.service = message.service.map((e) => e ? ServiceDescriptorProto.toJSON(e) : undefined); + } else { + obj.service = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + message.options !== undefined && (obj.options = message.options ? FileOptions.toJSON(message.options) : undefined); + message.sourceCodeInfo !== undefined + && (obj.sourceCodeInfo = message.sourceCodeInfo ? SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined); + message.syntax !== undefined && (obj.syntax = message.syntax); + return obj; + }, + + fromPartial<I extends Exact<DeepPartial<FileDescriptorProto>, I>>(object: I): FileDescriptorProto { + const message = createBaseFileDescriptorProto(); + message.name = object.name ?? ""; + message.package = object.package ?? 
""; + message.dependency = object.dependency?.map((e) => e) || []; + message.publicDependency = object.publicDependency?.map((e) => e) || []; + message.weakDependency = object.weakDependency?.map((e) => e) || []; + message.messageType = object.messageType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.service = object.service?.map((e) => ServiceDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? FileOptions.fromPartial(object.options) + : undefined; + message.sourceCodeInfo = (object.sourceCodeInfo !== undefined && object.sourceCodeInfo !== null) + ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) + : undefined; + message.syntax = object.syntax ?? ""; + return message; + }, +}; + +function createBaseDescriptorProto(): DescriptorProto { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + }; +} + +export const DescriptorProto = { + encode(message: DescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.field) { + FieldDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.nestedType) { + DescriptorProto.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.extensionRange) { + DescriptorProto_ExtensionRange.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const 
v of message.oneofDecl) { + OneofDescriptorProto.encode(v!, writer.uint32(66).fork()).ldelim(); + } + if (message.options !== undefined) { + MessageOptions.encode(message.options, writer.uint32(58).fork()).ldelim(); + } + for (const v of message.reservedRange) { + DescriptorProto_ReservedRange.encode(v!, writer.uint32(74).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(82).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.field.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.nestedType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 4: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.extensionRange.push(DescriptorProto_ExtensionRange.decode(reader, reader.uint32())); + break; + case 8: + message.oneofDecl.push(OneofDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + message.options = MessageOptions.decode(reader, reader.uint32()); + break; + case 9: + message.reservedRange.push(DescriptorProto_ReservedRange.decode(reader, reader.uint32())); + break; + case 10: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + field: Array.isArray(object?.field) ? 
object.field.map((e: any) => FieldDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: Array.isArray(object?.nestedType) + ? object.nestedType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + extensionRange: Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e: any) => DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e: any) => OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? MessageOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: DescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.field) { + obj.field = message.field.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.field = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + if (message.nestedType) { + obj.nestedType = message.nestedType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.nestedType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.extensionRange) { + obj.extensionRange = message.extensionRange.map((e) => e ? 
DescriptorProto_ExtensionRange.toJSON(e) : undefined); + } else { + obj.extensionRange = []; + } + if (message.oneofDecl) { + obj.oneofDecl = message.oneofDecl.map((e) => e ? OneofDescriptorProto.toJSON(e) : undefined); + } else { + obj.oneofDecl = []; + } + message.options !== undefined + && (obj.options = message.options ? MessageOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => e ? DescriptorProto_ReservedRange.toJSON(e) : undefined); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): DescriptorProto { + const message = createBaseDescriptorProto(); + message.name = object.name ?? ""; + message.field = object.field?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.nestedType = object.nestedType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.extensionRange = object.extensionRange?.map((e) => DescriptorProto_ExtensionRange.fromPartial(e)) || []; + message.oneofDecl = object.oneofDecl?.map((e) => OneofDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
MessageOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => DescriptorProto_ReservedRange.fromPartial(e)) || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseDescriptorProto_ExtensionRange(): DescriptorProto_ExtensionRange { + return { start: 0, end: 0, options: undefined }; +} + +export const DescriptorProto_ExtensionRange = { + encode(message: DescriptorProto_ExtensionRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + if (message.options !== undefined) { + ExtensionRangeOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ExtensionRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ExtensionRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + case 3: + message.options = ExtensionRangeOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ExtensionRange { + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + options: isSet(object.options) ? 
ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: DescriptorProto_ExtensionRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + message.options !== undefined + && (obj.options = message.options ? ExtensionRangeOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ExtensionRange { + const message = createBaseDescriptorProto_ExtensionRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? ExtensionRangeOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseDescriptorProto_ReservedRange(): DescriptorProto_ReservedRange { + return { start: 0, end: 0 }; +} + +export const DescriptorProto_ReservedRange = { + encode(message: DescriptorProto_ReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? 
Number(object.end) : 0 }; + }, + + toJSON(message: DescriptorProto_ReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ReservedRange { + const message = createBaseDescriptorProto_ReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + return message; + }, +}; + +function createBaseExtensionRangeOptions(): ExtensionRangeOptions { + return { uninterpretedOption: [] }; +} + +export const ExtensionRangeOptions = { + encode(message: ExtensionRangeOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ExtensionRangeOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ExtensionRangeOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} + +export const FieldDescriptorProto = { + encode(message: FieldDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(24).int32(message.number); + } + if (message.label !== 1) { + writer.uint32(32).int32(message.label); + } + if (message.type !== 1) { + writer.uint32(40).int32(message.type); + } + if (message.typeName !== "") { + writer.uint32(50).string(message.typeName); + } + if (message.extendee !== "") { + writer.uint32(18).string(message.extendee); + } + if (message.defaultValue !== "") { + writer.uint32(58).string(message.defaultValue); + } + if (message.oneofIndex !== 0) { + writer.uint32(72).int32(message.oneofIndex); + } + if (message.jsonName !== "") { + writer.uint32(82).string(message.jsonName); + } + if (message.options !== undefined) { + FieldOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.proto3Optional === true) { + writer.uint32(136).bool(message.proto3Optional); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 3: + message.number = reader.int32(); + break; + case 4: + message.label = reader.int32() as any; + break; + case 5: + message.type = reader.int32() as any; + break; + case 6: + message.typeName = reader.string(); + break; + case 2: + message.extendee = reader.string(); + break; + case 7: + message.defaultValue = reader.string(); + break; + case 9: + message.oneofIndex = reader.int32(); + break; + case 10: + message.jsonName = reader.string(); + break; + case 8: + message.options = FieldOptions.decode(reader, reader.uint32()); + break; + case 17: + message.proto3Optional = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) ? String(object.typeName) : "", + extendee: isSet(object.extendee) ? String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "", + oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? String(object.jsonName) : "", + options: isSet(object.options) ? FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) ? 
Boolean(object.proto3Optional) : false, + }; + }, + + toJSON(message: FieldDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label)); + message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type)); + message.typeName !== undefined && (obj.typeName = message.typeName); + message.extendee !== undefined && (obj.extendee = message.extendee); + message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue); + message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex)); + message.jsonName !== undefined && (obj.jsonName = message.jsonName); + message.options !== undefined && (obj.options = message.options ? FieldOptions.toJSON(message.options) : undefined); + message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional); + return obj; + }, + + fromPartial, I>>(object: I): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.label = object.label ?? 1; + message.type = object.type ?? 1; + message.typeName = object.typeName ?? ""; + message.extendee = object.extendee ?? ""; + message.defaultValue = object.defaultValue ?? ""; + message.oneofIndex = object.oneofIndex ?? 0; + message.jsonName = object.jsonName ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? FieldOptions.fromPartial(object.options) + : undefined; + message.proto3Optional = object.proto3Optional ?? 
false; + return message; + }, +}; + +function createBaseOneofDescriptorProto(): OneofDescriptorProto { + return { name: "", options: undefined }; +} + +export const OneofDescriptorProto = { + encode(message: OneofDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.options !== undefined) { + OneofOptions.encode(message.options, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.options = OneofOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + options: isSet(object.options) ? OneofOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: OneofDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.options !== undefined && (obj.options = message.options ? OneofOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto(); + message.name = object.name ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? 
OneofOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseEnumDescriptorProto(): EnumDescriptorProto { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] }; +} + +export const EnumDescriptorProto = { + encode(message: EnumDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.value) { + EnumValueDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + EnumOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.reservedRange) { + EnumDescriptorProto_EnumReservedRange.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(42).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.value.push(EnumValueDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = EnumOptions.decode(reader, reader.uint32()); + break; + case 4: + message.reservedRange.push(EnumDescriptorProto_EnumReservedRange.decode(reader, reader.uint32())); + break; + case 5: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + value: Array.isArray(object?.value) ? 
object.value.map((e: any) => EnumValueDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? EnumOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: EnumDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.value) { + obj.value = message.value.map((e) => e ? EnumValueDescriptorProto.toJSON(e) : undefined); + } else { + obj.value = []; + } + message.options !== undefined && (obj.options = message.options ? EnumOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => + e ? EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined + ); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto(); + message.name = object.name ?? ""; + message.value = object.value?.map((e) => EnumValueDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
EnumOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => EnumDescriptorProto_EnumReservedRange.fromPartial(e)) + || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseEnumDescriptorProto_EnumReservedRange(): EnumDescriptorProto_EnumReservedRange { + return { start: 0, end: 0 }; +} + +export const EnumDescriptorProto_EnumReservedRange = { + encode(message: EnumDescriptorProto_EnumReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto_EnumReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto_EnumReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + + toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +function createBaseEnumValueDescriptorProto(): EnumValueDescriptorProto { + return { name: "", number: 0, options: undefined }; +} + +export const EnumValueDescriptorProto = { + encode(message: EnumValueDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(16).int32(message.number); + } + if (message.options !== undefined) { + EnumValueOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.number = reader.int32(); + break; + case 3: + message.options = EnumValueOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + options: isSet(object.options) ? EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: EnumValueDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.options !== undefined + && (obj.options = message.options ? 
EnumValueOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? EnumValueOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseServiceDescriptorProto(): ServiceDescriptorProto { + return { name: "", method: [], options: undefined }; +} + +export const ServiceDescriptorProto = { + encode(message: ServiceDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.method) { + MethodDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + ServiceOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.method.push(MethodDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = ServiceOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + method: Array.isArray(object?.method) ? object.method.map((e: any) => MethodDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? 
ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: ServiceDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.method) { + obj.method = message.method.map((e) => e ? MethodDescriptorProto.toJSON(e) : undefined); + } else { + obj.method = []; + } + message.options !== undefined + && (obj.options = message.options ? ServiceOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto(); + message.name = object.name ?? ""; + message.method = object.method?.map((e) => MethodDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? ServiceOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseMethodDescriptorProto(): MethodDescriptorProto { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} + +export const MethodDescriptorProto = { + encode(message: MethodDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.inputType !== "") { + writer.uint32(18).string(message.inputType); + } + if (message.outputType !== "") { + writer.uint32(26).string(message.outputType); + } + if (message.options !== undefined) { + MethodOptions.encode(message.options, writer.uint32(34).fork()).ldelim(); + } + if (message.clientStreaming === true) { + writer.uint32(40).bool(message.clientStreaming); + } + if (message.serverStreaming === true) { + writer.uint32(48).bool(message.serverStreaming); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodDescriptorProto { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.inputType = reader.string(); + break; + case 3: + message.outputType = reader.string(); + break; + case 4: + message.options = MethodOptions.decode(reader, reader.uint32()); + break; + case 5: + message.clientStreaming = reader.bool(); + break; + case 6: + message.serverStreaming = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + inputType: isSet(object.inputType) ? String(object.inputType) : "", + outputType: isSet(object.outputType) ? String(object.outputType) : "", + options: isSet(object.options) ? MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false, + serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false, + }; + }, + + toJSON(message: MethodDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.inputType !== undefined && (obj.inputType = message.inputType); + message.outputType !== undefined && (obj.outputType = message.outputType); + message.options !== undefined + && (obj.options = message.options ? MethodOptions.toJSON(message.options) : undefined); + message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming); + message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming); + return obj; + }, + + fromPartial, I>>(object: I): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto(); + message.name = object.name ?? 
""; + message.inputType = object.inputType ?? ""; + message.outputType = object.outputType ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? MethodOptions.fromPartial(object.options) + : undefined; + message.clientStreaming = object.clientStreaming ?? false; + message.serverStreaming = object.serverStreaming ?? false; + return message; + }, +}; + +function createBaseFileOptions(): FileOptions { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + uninterpretedOption: [], + }; +} + +export const FileOptions = { + encode(message: FileOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.javaPackage !== "") { + writer.uint32(10).string(message.javaPackage); + } + if (message.javaOuterClassname !== "") { + writer.uint32(66).string(message.javaOuterClassname); + } + if (message.javaMultipleFiles === true) { + writer.uint32(80).bool(message.javaMultipleFiles); + } + if (message.javaGenerateEqualsAndHash === true) { + writer.uint32(160).bool(message.javaGenerateEqualsAndHash); + } + if (message.javaStringCheckUtf8 === true) { + writer.uint32(216).bool(message.javaStringCheckUtf8); + } + if (message.optimizeFor !== 1) { + writer.uint32(72).int32(message.optimizeFor); + } + if (message.goPackage !== "") { + writer.uint32(90).string(message.goPackage); + } + if (message.ccGenericServices === true) { + writer.uint32(128).bool(message.ccGenericServices); + } + if (message.javaGenericServices === true) { + writer.uint32(136).bool(message.javaGenericServices); + } + if 
(message.pyGenericServices === true) { + writer.uint32(144).bool(message.pyGenericServices); + } + if (message.phpGenericServices === true) { + writer.uint32(336).bool(message.phpGenericServices); + } + if (message.deprecated === true) { + writer.uint32(184).bool(message.deprecated); + } + if (message.ccEnableArenas === true) { + writer.uint32(248).bool(message.ccEnableArenas); + } + if (message.objcClassPrefix !== "") { + writer.uint32(290).string(message.objcClassPrefix); + } + if (message.csharpNamespace !== "") { + writer.uint32(298).string(message.csharpNamespace); + } + if (message.swiftPrefix !== "") { + writer.uint32(314).string(message.swiftPrefix); + } + if (message.phpClassPrefix !== "") { + writer.uint32(322).string(message.phpClassPrefix); + } + if (message.phpNamespace !== "") { + writer.uint32(330).string(message.phpNamespace); + } + if (message.phpMetadataNamespace !== "") { + writer.uint32(354).string(message.phpMetadataNamespace); + } + if (message.rubyPackage !== "") { + writer.uint32(362).string(message.rubyPackage); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.javaPackage = reader.string(); + break; + case 8: + message.javaOuterClassname = reader.string(); + break; + case 10: + message.javaMultipleFiles = reader.bool(); + break; + case 20: + message.javaGenerateEqualsAndHash = reader.bool(); + break; + case 27: + message.javaStringCheckUtf8 = reader.bool(); + break; + case 9: + message.optimizeFor = reader.int32() as any; + break; + case 11: + message.goPackage = reader.string(); + break; + case 16: + message.ccGenericServices = reader.bool(); + break; + case 17: + message.javaGenericServices = reader.bool(); + break; + case 18: + message.pyGenericServices = reader.bool(); + break; + case 42: + message.phpGenericServices = reader.bool(); + break; + case 23: + message.deprecated = reader.bool(); + break; + case 31: + message.ccEnableArenas = reader.bool(); + break; + case 36: + message.objcClassPrefix = reader.string(); + break; + case 37: + message.csharpNamespace = reader.string(); + break; + case 39: + message.swiftPrefix = reader.string(); + break; + case 40: + message.phpClassPrefix = reader.string(); + break; + case 41: + message.phpNamespace = reader.string(); + break; + case 44: + message.phpMetadataNamespace = reader.string(); + break; + case 45: + message.rubyPackage = reader.string(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileOptions { + return { + javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "", + javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) ? 
Boolean(object.javaMultipleFiles) : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false, + optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, + goPackage: isSet(object.goPackage) ? String(object.goPackage) : "", + ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false, + javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false, + pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false, + phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false, + objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "", + csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "", + swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "", + phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "", + phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "", + rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "", + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FileOptions): unknown { + const obj: any = {}; + message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage); + message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname); + message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles); + message.javaGenerateEqualsAndHash !== undefined + && (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash); + message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8); + message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor)); + message.goPackage !== undefined && (obj.goPackage = message.goPackage); + message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices); + message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices); + message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices); + message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas); + message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix); + message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace); + message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix); + message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix); + message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace); + message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace); + message.rubyPackage !== undefined 
&& (obj.rubyPackage = message.rubyPackage); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileOptions { + const message = createBaseFileOptions(); + message.javaPackage = object.javaPackage ?? ""; + message.javaOuterClassname = object.javaOuterClassname ?? ""; + message.javaMultipleFiles = object.javaMultipleFiles ?? false; + message.javaGenerateEqualsAndHash = object.javaGenerateEqualsAndHash ?? false; + message.javaStringCheckUtf8 = object.javaStringCheckUtf8 ?? false; + message.optimizeFor = object.optimizeFor ?? 1; + message.goPackage = object.goPackage ?? ""; + message.ccGenericServices = object.ccGenericServices ?? false; + message.javaGenericServices = object.javaGenericServices ?? false; + message.pyGenericServices = object.pyGenericServices ?? false; + message.phpGenericServices = object.phpGenericServices ?? false; + message.deprecated = object.deprecated ?? false; + message.ccEnableArenas = object.ccEnableArenas ?? false; + message.objcClassPrefix = object.objcClassPrefix ?? ""; + message.csharpNamespace = object.csharpNamespace ?? ""; + message.swiftPrefix = object.swiftPrefix ?? ""; + message.phpClassPrefix = object.phpClassPrefix ?? ""; + message.phpNamespace = object.phpNamespace ?? ""; + message.phpMetadataNamespace = object.phpMetadataNamespace ?? ""; + message.rubyPackage = object.rubyPackage ?? 
""; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMessageOptions(): MessageOptions { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + uninterpretedOption: [], + }; +} + +export const MessageOptions = { + encode(message: MessageOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.messageSetWireFormat === true) { + writer.uint32(8).bool(message.messageSetWireFormat); + } + if (message.noStandardDescriptorAccessor === true) { + writer.uint32(16).bool(message.noStandardDescriptorAccessor); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.mapEntry === true) { + writer.uint32(56).bool(message.mapEntry); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MessageOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMessageOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.messageSetWireFormat = reader.bool(); + break; + case 2: + message.noStandardDescriptorAccessor = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 7: + message.mapEntry = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MessageOptions { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) ? 
Boolean(object.messageSetWireFormat) : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MessageOptions): unknown { + const obj: any = {}; + message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat); + message.noStandardDescriptorAccessor !== undefined + && (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MessageOptions { + const message = createBaseMessageOptions(); + message.messageSetWireFormat = object.messageSetWireFormat ?? false; + message.noStandardDescriptorAccessor = object.noStandardDescriptorAccessor ?? false; + message.deprecated = object.deprecated ?? false; + message.mapEntry = object.mapEntry ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldOptions(): FieldOptions { + return { ctype: 0, packed: false, jstype: 0, lazy: false, deprecated: false, weak: false, uninterpretedOption: [] }; +} + +export const FieldOptions = { + encode(message: FieldOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ctype !== 0) { + writer.uint32(8).int32(message.ctype); + } + if (message.packed === true) { + writer.uint32(16).bool(message.packed); + } + if (message.jstype !== 0) { + writer.uint32(48).int32(message.jstype); + } + if (message.lazy === true) { + writer.uint32(40).bool(message.lazy); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.weak === true) { + writer.uint32(80).bool(message.weak); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.ctype = reader.int32() as any; + break; + case 2: + message.packed = reader.bool(); + break; + case 6: + message.jstype = reader.int32() as any; + break; + case 5: + message.lazy = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 10: + message.weak = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldOptions { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? Boolean(object.lazy) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? Boolean(object.weak) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FieldOptions): unknown { + const obj: any = {}; + message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype)); + message.packed !== undefined && (obj.packed = message.packed); + message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype)); + message.lazy !== undefined && (obj.lazy = message.lazy); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.weak !== undefined && (obj.weak = message.weak); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FieldOptions { + const message = createBaseFieldOptions(); + message.ctype = object.ctype ?? 0; + message.packed = object.packed ?? false; + message.jstype = object.jstype ?? 0; + message.lazy = object.lazy ?? false; + message.deprecated = object.deprecated ?? false; + message.weak = object.weak ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseOneofOptions(): OneofOptions { + return { uninterpretedOption: [] }; +} + +export const OneofOptions = { + encode(message: OneofOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: OneofOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): OneofOptions { + const message = createBaseOneofOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumOptions(): EnumOptions { + return { allowAlias: false, deprecated: false, uninterpretedOption: [] }; +} + +export const EnumOptions = { + encode(message: EnumOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.allowAlias === true) { + writer.uint32(16).bool(message.allowAlias); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.allowAlias = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumOptions { + return { + allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumOptions): unknown { + const obj: any = {}; + message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumOptions { + const message = createBaseEnumOptions(); + message.allowAlias = object.allowAlias ?? false; + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumValueOptions(): EnumValueOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const EnumValueOptions = { + encode(message: EnumValueOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(8).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumValueOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumValueOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumValueOptions { + const message = createBaseEnumValueOptions(); + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseServiceOptions(): ServiceOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const ServiceOptions = { + encode(message: ServiceOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceOptions { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ServiceOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ServiceOptions { + const message = createBaseServiceOptions(); + message.deprecated = object.deprecated ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMethodOptions(): MethodOptions { + return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] }; +} + +export const MethodOptions = { + encode(message: MethodOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + if (message.idempotencyLevel !== 0) { + writer.uint32(272).int32(message.idempotencyLevel); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 34: + message.idempotencyLevel = reader.int32() as any; + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : 0, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MethodOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.idempotencyLevel !== undefined + && (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel)); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MethodOptions { + const message = createBaseMethodOptions(); + message.deprecated = object.deprecated ?? false; + message.idempotencyLevel = object.idempotencyLevel ?? 0; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseUninterpretedOption(): UninterpretedOption { + return { + name: [], + identifierValue: "", + positiveIntValue: 0, + negativeIntValue: 0, + doubleValue: 0, + stringValue: new Uint8Array(), + aggregateValue: "", + }; +} + +export const UninterpretedOption = { + encode(message: UninterpretedOption, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.name) { + UninterpretedOption_NamePart.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.identifierValue !== "") { + writer.uint32(26).string(message.identifierValue); + } + if (message.positiveIntValue !== 0) { + writer.uint32(32).uint64(message.positiveIntValue); + } + if (message.negativeIntValue !== 0) { + writer.uint32(40).int64(message.negativeIntValue); + } + if (message.doubleValue !== 0) { + writer.uint32(49).double(message.doubleValue); + } + if (message.stringValue.length !== 0) { + writer.uint32(58).bytes(message.stringValue); + } + if (message.aggregateValue !== "") { + writer.uint32(66).string(message.aggregateValue); + } + 
return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.name.push(UninterpretedOption_NamePart.decode(reader, reader.uint32())); + break; + case 3: + message.identifierValue = reader.string(); + break; + case 4: + message.positiveIntValue = longToNumber(reader.uint64() as Long); + break; + case 5: + message.negativeIntValue = longToNumber(reader.int64() as Long); + break; + case 6: + message.doubleValue = reader.double(); + break; + case 7: + message.stringValue = reader.bytes(); + break; + case 8: + message.aggregateValue = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption { + return { + name: Array.isArray(object?.name) ? object.name.map((e: any) => UninterpretedOption_NamePart.fromJSON(e)) : [], + identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "", + positiveIntValue: isSet(object.positiveIntValue) ? Number(object.positiveIntValue) : 0, + negativeIntValue: isSet(object.negativeIntValue) ? Number(object.negativeIntValue) : 0, + doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) ? bytesFromBase64(object.stringValue) : new Uint8Array(), + aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "", + }; + }, + + toJSON(message: UninterpretedOption): unknown { + const obj: any = {}; + if (message.name) { + obj.name = message.name.map((e) => e ? 
UninterpretedOption_NamePart.toJSON(e) : undefined); + } else { + obj.name = []; + } + message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue); + message.positiveIntValue !== undefined && (obj.positiveIntValue = Math.round(message.positiveIntValue)); + message.negativeIntValue !== undefined && (obj.negativeIntValue = Math.round(message.negativeIntValue)); + message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue); + message.stringValue !== undefined + && (obj.stringValue = base64FromBytes( + message.stringValue !== undefined ? message.stringValue : new Uint8Array(), + )); + message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption { + const message = createBaseUninterpretedOption(); + message.name = object.name?.map((e) => UninterpretedOption_NamePart.fromPartial(e)) || []; + message.identifierValue = object.identifierValue ?? ""; + message.positiveIntValue = object.positiveIntValue ?? 0; + message.negativeIntValue = object.negativeIntValue ?? 0; + message.doubleValue = object.doubleValue ?? 0; + message.stringValue = object.stringValue ?? new Uint8Array(); + message.aggregateValue = object.aggregateValue ?? ""; + return message; + }, +}; + +function createBaseUninterpretedOption_NamePart(): UninterpretedOption_NamePart { + return { namePart: "", isExtension: false }; +} + +export const UninterpretedOption_NamePart = { + encode(message: UninterpretedOption_NamePart, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.namePart !== "") { + writer.uint32(10).string(message.namePart); + } + if (message.isExtension === true) { + writer.uint32(16).bool(message.isExtension); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption_NamePart { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseUninterpretedOption_NamePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.namePart = reader.string(); + break; + case 2: + message.isExtension = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption_NamePart { + return { + namePart: isSet(object.namePart) ? String(object.namePart) : "", + isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false, + }; + }, + + toJSON(message: UninterpretedOption_NamePart): unknown { + const obj: any = {}; + message.namePart !== undefined && (obj.namePart = message.namePart); + message.isExtension !== undefined && (obj.isExtension = message.isExtension); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart(); + message.namePart = object.namePart ?? ""; + message.isExtension = object.isExtension ?? false; + return message; + }, +}; + +function createBaseSourceCodeInfo(): SourceCodeInfo { + return { location: [] }; +} + +export const SourceCodeInfo = { + encode(message: SourceCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.location) { + SourceCodeInfo_Location.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.location.push(SourceCodeInfo_Location.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo { + return { + location: Array.isArray(object?.location) + ? object.location.map((e: any) => SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo): unknown { + const obj: any = {}; + if (message.location) { + obj.location = message.location.map((e) => e ? SourceCodeInfo_Location.toJSON(e) : undefined); + } else { + obj.location = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo { + const message = createBaseSourceCodeInfo(); + message.location = object.location?.map((e) => SourceCodeInfo_Location.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSourceCodeInfo_Location(): SourceCodeInfo_Location { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} + +export const SourceCodeInfo_Location = { + encode(message: SourceCodeInfo_Location, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(18).fork(); + for (const v of message.span) { + writer.int32(v); + } + writer.ldelim(); + if (message.leadingComments !== "") { + writer.uint32(26).string(message.leadingComments); + } + if (message.trailingComments !== "") { + writer.uint32(34).string(message.trailingComments); + } + for (const v of message.leadingDetachedComments) { + writer.uint32(50).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo_Location { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo_Location(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.span.push(reader.int32()); + } + } else { + message.span.push(reader.int32()); + } + break; + case 3: + message.leadingComments = reader.string(); + break; + case 4: + message.trailingComments = reader.string(); + break; + case 6: + message.leadingDetachedComments.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo_Location { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + span: Array.isArray(object?.span) ? object.span.map((e: any) => Number(e)) : [], + leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "", + trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "", + leadingDetachedComments: Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e: any) => String(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo_Location): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + if (message.span) { + obj.span = message.span.map((e) => Math.round(e)); + } else { + obj.span = []; + } + message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments); + message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments); + if (message.leadingDetachedComments) { + obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e); + } else { + obj.leadingDetachedComments = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location(); + message.path = object.path?.map((e) => e) || []; + message.span = object.span?.map((e) => e) || []; + message.leadingComments = object.leadingComments ?? ""; + message.trailingComments = object.trailingComments ?? ""; + message.leadingDetachedComments = object.leadingDetachedComments?.map((e) => e) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo(): GeneratedCodeInfo { + return { annotation: [] }; +} + +export const GeneratedCodeInfo = { + encode(message: GeneratedCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.annotation) { + GeneratedCodeInfo_Annotation.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.annotation.push(GeneratedCodeInfo_Annotation.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo { + return { + annotation: Array.isArray(object?.annotation) + ? object.annotation.map((e: any) => GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GeneratedCodeInfo): unknown { + const obj: any = {}; + if (message.annotation) { + obj.annotation = message.annotation.map((e) => e ? GeneratedCodeInfo_Annotation.toJSON(e) : undefined); + } else { + obj.annotation = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo(); + message.annotation = object.annotation?.map((e) => GeneratedCodeInfo_Annotation.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo_Annotation(): GeneratedCodeInfo_Annotation { + return { path: [], sourceFile: "", begin: 0, end: 0 }; +} + +export const GeneratedCodeInfo_Annotation = { + encode(message: GeneratedCodeInfo_Annotation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + if (message.sourceFile !== "") { + writer.uint32(18).string(message.sourceFile); + } + if (message.begin !== 0) { + writer.uint32(24).int32(message.begin); + } + if (message.end !== 0) { + writer.uint32(32).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo_Annotation { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo_Annotation(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + message.sourceFile = reader.string(); + break; + case 3: + message.begin = reader.int32(); + break; + case 4: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo_Annotation { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "", + begin: isSet(object.begin) ? Number(object.begin) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + }; + }, + + toJSON(message: GeneratedCodeInfo_Annotation): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile); + message.begin !== undefined && (obj.begin = Math.round(message.begin)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation(); + message.path = object.path?.map((e) => e) || []; + message.sourceFile = object.sourceFile ?? ""; + message.begin = object.begin ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/common/uint16_range.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/common/uint16_range.ts new file mode 100644 index 000000000..8f0c4fc98 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/common/uint16_range.ts @@ -0,0 +1,79 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.common"; + +export interface Uint16Range { + min: number; + max: number; +} + +function createBaseUint16Range(): Uint16Range { + return { min: 0, max: 0 }; +} + +export const Uint16Range = { + encode(message: Uint16Range, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.min !== 0) { + writer.uint32(8).int32(message.min); + } + if (message.max !== 0) { + writer.uint32(16).int32(message.max); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Uint16Range { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseUint16Range(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.min = reader.int32(); + break; + case 2: + message.max = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Uint16Range { + return { min: isSet(object.min) ? Number(object.min) : 0, max: isSet(object.max) ? Number(object.max) : 0 }; + }, + + toJSON(message: Uint16Range): unknown { + const obj: any = {}; + message.min !== undefined && (obj.min = Math.round(message.min)); + message.max !== undefined && (obj.max = Math.round(message.max)); + return obj; + }, + + fromPartial, I>>(object: I): Uint16Range { + const message = createBaseUint16Range(); + message.min = object.min ?? 0; + message.max = object.max ?? 0; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/account.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/account.ts new file mode 100644 index 000000000..0e2758407 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/account.ts @@ -0,0 +1,151 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { BaseAccount } from "../../../cosmos/auth/v1beta1/auth"; +import { Uint16Range } from "../common/uint16_range"; +import { Grant } from "./grant"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.dclauth"; + +export interface Account { + baseAccount: + | BaseAccount + | undefined; + /** + * NOTE. 
we do not user AccountRoles casting here to preserve repeated form + * so protobuf takes care about repeated items in generated code, + * (but that might be not the final solution) + */ + roles: string[]; + approvals: Grant[]; + vendorID: number; + rejects: Grant[]; + productIDs: Uint16Range[]; +} + +function createBaseAccount(): Account { + return { baseAccount: undefined, roles: [], approvals: [], vendorID: 0, rejects: [], productIDs: [] }; +} + +export const Account = { + encode(message: Account, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.baseAccount !== undefined) { + BaseAccount.encode(message.baseAccount, writer.uint32(10).fork()).ldelim(); + } + for (const v of message.roles) { + writer.uint32(18).string(v!); + } + for (const v of message.approvals) { + Grant.encode(v!, writer.uint32(26).fork()).ldelim(); + } + if (message.vendorID !== 0) { + writer.uint32(32).int32(message.vendorID); + } + for (const v of message.rejects) { + Grant.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const v of message.productIDs) { + Uint16Range.encode(v!, writer.uint32(50).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Account { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseAccount(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.baseAccount = BaseAccount.decode(reader, reader.uint32()); + break; + case 2: + message.roles.push(reader.string()); + break; + case 3: + message.approvals.push(Grant.decode(reader, reader.uint32())); + break; + case 4: + message.vendorID = reader.int32(); + break; + case 5: + message.rejects.push(Grant.decode(reader, reader.uint32())); + break; + case 6: + message.productIDs.push(Uint16Range.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Account { + return { + baseAccount: isSet(object.baseAccount) ? BaseAccount.fromJSON(object.baseAccount) : undefined, + roles: Array.isArray(object?.roles) ? object.roles.map((e: any) => String(e)) : [], + approvals: Array.isArray(object?.approvals) ? object.approvals.map((e: any) => Grant.fromJSON(e)) : [], + vendorID: isSet(object.vendorID) ? Number(object.vendorID) : 0, + rejects: Array.isArray(object?.rejects) ? object.rejects.map((e: any) => Grant.fromJSON(e)) : [], + productIDs: Array.isArray(object?.productIDs) ? object.productIDs.map((e: any) => Uint16Range.fromJSON(e)) : [], + }; + }, + + toJSON(message: Account): unknown { + const obj: any = {}; + message.baseAccount !== undefined + && (obj.baseAccount = message.baseAccount ? BaseAccount.toJSON(message.baseAccount) : undefined); + if (message.roles) { + obj.roles = message.roles.map((e) => e); + } else { + obj.roles = []; + } + if (message.approvals) { + obj.approvals = message.approvals.map((e) => e ? Grant.toJSON(e) : undefined); + } else { + obj.approvals = []; + } + message.vendorID !== undefined && (obj.vendorID = Math.round(message.vendorID)); + if (message.rejects) { + obj.rejects = message.rejects.map((e) => e ? 
Grant.toJSON(e) : undefined); + } else { + obj.rejects = []; + } + if (message.productIDs) { + obj.productIDs = message.productIDs.map((e) => e ? Uint16Range.toJSON(e) : undefined); + } else { + obj.productIDs = []; + } + return obj; + }, + + fromPartial, I>>(object: I): Account { + const message = createBaseAccount(); + message.baseAccount = (object.baseAccount !== undefined && object.baseAccount !== null) + ? BaseAccount.fromPartial(object.baseAccount) + : undefined; + message.roles = object.roles?.map((e) => e) || []; + message.approvals = object.approvals?.map((e) => Grant.fromPartial(e)) || []; + message.vendorID = object.vendorID ?? 0; + message.rejects = object.rejects?.map((e) => Grant.fromPartial(e)) || []; + message.productIDs = object.productIDs?.map((e) => Uint16Range.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/account_stat.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/account_stat.ts new file mode 100644 index 000000000..1d4ab99af --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/account_stat.ts @@ -0,0 +1,102 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.dclauth"; + +export interface AccountStat { + number: number; +} + +function createBaseAccountStat(): AccountStat { + return { number: 0 }; +} + +export const AccountStat = { + encode(message: AccountStat, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.number !== 0) { + writer.uint32(8).uint64(message.number); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): AccountStat { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAccountStat(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.number = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): AccountStat { + return { number: isSet(object.number) ? 
Number(object.number) : 0 }; + }, + + toJSON(message: AccountStat): unknown { + const obj: any = {}; + message.number !== undefined && (obj.number = Math.round(message.number)); + return obj; + }, + + fromPartial, I>>(object: I): AccountStat { + const message = createBaseAccountStat(); + message.number = object.number ?? 0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/genesis.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/genesis.ts new file mode 100644 index 000000000..d73eef710 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/genesis.ts @@ -0,0 +1,171 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Account } from "./account"; +import { AccountStat } from "./account_stat"; +import { PendingAccount } from "./pending_account"; +import { PendingAccountRevocation } from "./pending_account_revocation"; +import { RejectedAccount } from "./rejected_account"; +import { RevokedAccount } from "./revoked_account"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.dclauth"; + +/** GenesisState defines the dclauth module's genesis state. 
*/ +export interface GenesisState { + accountList: Account[]; + pendingAccountList: PendingAccount[]; + pendingAccountRevocationList: PendingAccountRevocation[]; + accountStat: AccountStat | undefined; + revokedAccountList: RevokedAccount[]; + /** this line is used by starport scaffolding # genesis/proto/state */ + rejectedAccountList: RejectedAccount[]; +} + +function createBaseGenesisState(): GenesisState { + return { + accountList: [], + pendingAccountList: [], + pendingAccountRevocationList: [], + accountStat: undefined, + revokedAccountList: [], + rejectedAccountList: [], + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.accountList) { + Account.encode(v!, writer.uint32(10).fork()).ldelim(); + } + for (const v of message.pendingAccountList) { + PendingAccount.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.pendingAccountRevocationList) { + PendingAccountRevocation.encode(v!, writer.uint32(26).fork()).ldelim(); + } + if (message.accountStat !== undefined) { + AccountStat.encode(message.accountStat, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.revokedAccountList) { + RevokedAccount.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const v of message.rejectedAccountList) { + RejectedAccount.encode(v!, writer.uint32(50).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGenesisState(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.accountList.push(Account.decode(reader, reader.uint32())); + break; + case 2: + message.pendingAccountList.push(PendingAccount.decode(reader, reader.uint32())); + break; + case 3: + message.pendingAccountRevocationList.push(PendingAccountRevocation.decode(reader, reader.uint32())); + break; + case 4: + message.accountStat = AccountStat.decode(reader, reader.uint32()); + break; + case 5: + message.revokedAccountList.push(RevokedAccount.decode(reader, reader.uint32())); + break; + case 6: + message.rejectedAccountList.push(RejectedAccount.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GenesisState { + return { + accountList: Array.isArray(object?.accountList) ? object.accountList.map((e: any) => Account.fromJSON(e)) : [], + pendingAccountList: Array.isArray(object?.pendingAccountList) + ? object.pendingAccountList.map((e: any) => PendingAccount.fromJSON(e)) + : [], + pendingAccountRevocationList: Array.isArray(object?.pendingAccountRevocationList) + ? object.pendingAccountRevocationList.map((e: any) => PendingAccountRevocation.fromJSON(e)) + : [], + accountStat: isSet(object.accountStat) ? AccountStat.fromJSON(object.accountStat) : undefined, + revokedAccountList: Array.isArray(object?.revokedAccountList) + ? object.revokedAccountList.map((e: any) => RevokedAccount.fromJSON(e)) + : [], + rejectedAccountList: Array.isArray(object?.rejectedAccountList) + ? object.rejectedAccountList.map((e: any) => RejectedAccount.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GenesisState): unknown { + const obj: any = {}; + if (message.accountList) { + obj.accountList = message.accountList.map((e) => e ? 
Account.toJSON(e) : undefined); + } else { + obj.accountList = []; + } + if (message.pendingAccountList) { + obj.pendingAccountList = message.pendingAccountList.map((e) => e ? PendingAccount.toJSON(e) : undefined); + } else { + obj.pendingAccountList = []; + } + if (message.pendingAccountRevocationList) { + obj.pendingAccountRevocationList = message.pendingAccountRevocationList.map((e) => + e ? PendingAccountRevocation.toJSON(e) : undefined + ); + } else { + obj.pendingAccountRevocationList = []; + } + message.accountStat !== undefined + && (obj.accountStat = message.accountStat ? AccountStat.toJSON(message.accountStat) : undefined); + if (message.revokedAccountList) { + obj.revokedAccountList = message.revokedAccountList.map((e) => e ? RevokedAccount.toJSON(e) : undefined); + } else { + obj.revokedAccountList = []; + } + if (message.rejectedAccountList) { + obj.rejectedAccountList = message.rejectedAccountList.map((e) => e ? RejectedAccount.toJSON(e) : undefined); + } else { + obj.rejectedAccountList = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GenesisState { + const message = createBaseGenesisState(); + message.accountList = object.accountList?.map((e) => Account.fromPartial(e)) || []; + message.pendingAccountList = object.pendingAccountList?.map((e) => PendingAccount.fromPartial(e)) || []; + message.pendingAccountRevocationList = + object.pendingAccountRevocationList?.map((e) => PendingAccountRevocation.fromPartial(e)) || []; + message.accountStat = (object.accountStat !== undefined && object.accountStat !== null) + ? 
AccountStat.fromPartial(object.accountStat) + : undefined; + message.revokedAccountList = object.revokedAccountList?.map((e) => RevokedAccount.fromPartial(e)) || []; + message.rejectedAccountList = object.rejectedAccountList?.map((e) => RejectedAccount.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/grant.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/grant.ts new file mode 100644 index 000000000..a38e736d6 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/grant.ts @@ -0,0 +1,125 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.dclauth"; + +export interface Grant { + address: string; + /** number of nanoseconds elapsed since January 1, 1970 UTC */ + time: number; + info: string; +} + +function createBaseGrant(): Grant { + return { address: "", time: 0, info: "" }; +} + +export const Grant = { + encode(message: Grant, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + if (message.time !== 0) { + writer.uint32(16).int64(message.time); + } + if 
(message.info !== "") { + writer.uint32(26).string(message.info); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Grant { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGrant(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + case 2: + message.time = longToNumber(reader.int64() as Long); + break; + case 3: + message.info = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Grant { + return { + address: isSet(object.address) ? String(object.address) : "", + time: isSet(object.time) ? Number(object.time) : 0, + info: isSet(object.info) ? String(object.info) : "", + }; + }, + + toJSON(message: Grant): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + message.time !== undefined && (obj.time = Math.round(message.time)); + message.info !== undefined && (obj.info = message.info); + return obj; + }, + + fromPartial, I>>(object: I): Grant { + const message = createBaseGrant(); + message.address = object.address ?? ""; + message.time = object.time ?? 0; + message.info = object.info ?? ""; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? 
T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/pending_account.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/pending_account.ts new file mode 100644 index 000000000..685462392 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/pending_account.ts @@ -0,0 +1,78 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Account } from "./account"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.dclauth"; + +/** + * TODO issue 99: do we need that ??? 
+ * option (gogoproto.goproto_getters) = false; + * option (gogoproto.goproto_stringer) = false; + */ +export interface PendingAccount { + account: Account | undefined; +} + +function createBasePendingAccount(): PendingAccount { + return { account: undefined }; +} + +export const PendingAccount = { + encode(message: PendingAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.account !== undefined) { + Account.encode(message.account, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PendingAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePendingAccount(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.account = Account.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PendingAccount { + return { account: isSet(object.account) ? Account.fromJSON(object.account) : undefined }; + }, + + toJSON(message: PendingAccount): unknown { + const obj: any = {}; + message.account !== undefined && (obj.account = message.account ? Account.toJSON(message.account) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): PendingAccount { + const message = createBasePendingAccount(); + message.account = (object.account !== undefined && object.account !== null) + ? Account.fromPartial(object.account) + : undefined; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? 
keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/pending_account_revocation.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/pending_account_revocation.ts new file mode 100644 index 000000000..9c8592790 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/pending_account_revocation.ts @@ -0,0 +1,87 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Grant } from "./grant"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.dclauth"; + +export interface PendingAccountRevocation { + address: string; + approvals: Grant[]; +} + +function createBasePendingAccountRevocation(): PendingAccountRevocation { + return { address: "", approvals: [] }; +} + +export const PendingAccountRevocation = { + encode(message: PendingAccountRevocation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + for (const v of message.approvals) { + Grant.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PendingAccountRevocation { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePendingAccountRevocation(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + case 2: + message.approvals.push(Grant.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PendingAccountRevocation { + return { + address: isSet(object.address) ? String(object.address) : "", + approvals: Array.isArray(object?.approvals) ? object.approvals.map((e: any) => Grant.fromJSON(e)) : [], + }; + }, + + toJSON(message: PendingAccountRevocation): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + if (message.approvals) { + obj.approvals = message.approvals.map((e) => e ? Grant.toJSON(e) : undefined); + } else { + obj.approvals = []; + } + return obj; + }, + + fromPartial, I>>(object: I): PendingAccountRevocation { + const message = createBasePendingAccountRevocation(); + message.address = object.address ?? ""; + message.approvals = object.approvals?.map((e) => Grant.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/query.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/query.ts new file mode 100644 index 000000000..4d2617cf2 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/query.ts @@ -0,0 +1,1474 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { PageRequest, PageResponse } from "../../../cosmos/base/query/v1beta1/pagination"; +import { Account } from "./account"; +import { AccountStat } from "./account_stat"; +import { PendingAccount } from "./pending_account"; +import { PendingAccountRevocation } from "./pending_account_revocation"; +import { RejectedAccount } from "./rejected_account"; +import { RevokedAccount } from "./revoked_account"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.dclauth"; + +export interface QueryGetAccountRequest { + address: string; +} + +export interface QueryGetAccountResponse { + account: Account | undefined; +} + +export interface QueryAllAccountRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllAccountResponse { + account: Account[]; + pagination: PageResponse | undefined; +} + +export interface QueryGetPendingAccountRequest { + address: string; +} + +export interface QueryGetPendingAccountResponse { + pendingAccount: PendingAccount | undefined; +} + +export interface QueryAllPendingAccountRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllPendingAccountResponse { + pendingAccount: PendingAccount[]; + pagination: PageResponse | undefined; +} + +export interface 
QueryGetPendingAccountRevocationRequest { + address: string; +} + +export interface QueryGetPendingAccountRevocationResponse { + pendingAccountRevocation: PendingAccountRevocation | undefined; +} + +export interface QueryAllPendingAccountRevocationRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllPendingAccountRevocationResponse { + pendingAccountRevocation: PendingAccountRevocation[]; + pagination: PageResponse | undefined; +} + +export interface QueryGetAccountStatRequest { +} + +export interface QueryGetAccountStatResponse { + AccountStat: AccountStat | undefined; +} + +export interface QueryGetRevokedAccountRequest { + address: string; +} + +export interface QueryGetRevokedAccountResponse { + revokedAccount: RevokedAccount | undefined; +} + +export interface QueryAllRevokedAccountRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllRevokedAccountResponse { + revokedAccount: RevokedAccount[]; + pagination: PageResponse | undefined; +} + +export interface QueryGetRejectedAccountRequest { + address: string; +} + +export interface QueryGetRejectedAccountResponse { + rejectedAccount: RejectedAccount | undefined; +} + +export interface QueryAllRejectedAccountRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllRejectedAccountResponse { + rejectedAccount: RejectedAccount[]; + pagination: PageResponse | undefined; +} + +function createBaseQueryGetAccountRequest(): QueryGetAccountRequest { + return { address: "" }; +} + +export const QueryGetAccountRequest = { + encode(message: QueryGetAccountRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetAccountRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGetAccountRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetAccountRequest { + return { address: isSet(object.address) ? String(object.address) : "" }; + }, + + toJSON(message: QueryGetAccountRequest): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + return obj; + }, + + fromPartial, I>>(object: I): QueryGetAccountRequest { + const message = createBaseQueryGetAccountRequest(); + message.address = object.address ?? ""; + return message; + }, +}; + +function createBaseQueryGetAccountResponse(): QueryGetAccountResponse { + return { account: undefined }; +} + +export const QueryGetAccountResponse = { + encode(message: QueryGetAccountResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.account !== undefined) { + Account.encode(message.account, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetAccountResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetAccountResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.account = Account.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetAccountResponse { + return { account: isSet(object.account) ? 
Account.fromJSON(object.account) : undefined }; + }, + + toJSON(message: QueryGetAccountResponse): unknown { + const obj: any = {}; + message.account !== undefined && (obj.account = message.account ? Account.toJSON(message.account) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryGetAccountResponse { + const message = createBaseQueryGetAccountResponse(); + message.account = (object.account !== undefined && object.account !== null) + ? Account.fromPartial(object.account) + : undefined; + return message; + }, +}; + +function createBaseQueryAllAccountRequest(): QueryAllAccountRequest { + return { pagination: undefined }; +} + +export const QueryAllAccountRequest = { + encode(message: QueryAllAccountRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllAccountRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllAccountRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllAccountRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllAccountRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? 
PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryAllAccountRequest { + const message = createBaseQueryAllAccountRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllAccountResponse(): QueryAllAccountResponse { + return { account: [], pagination: undefined }; +} + +export const QueryAllAccountResponse = { + encode(message: QueryAllAccountResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.account) { + Account.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllAccountResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllAccountResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.account.push(Account.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllAccountResponse { + return { + account: Array.isArray(object?.account) ? object.account.map((e: any) => Account.fromJSON(e)) : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllAccountResponse): unknown { + const obj: any = {}; + if (message.account) { + obj.account = message.account.map((e) => e ? 
Account.toJSON(e) : undefined); + } else { + obj.account = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryAllAccountResponse { + const message = createBaseQueryAllAccountResponse(); + message.account = object.account?.map((e) => Account.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetPendingAccountRequest(): QueryGetPendingAccountRequest { + return { address: "" }; +} + +export const QueryGetPendingAccountRequest = { + encode(message: QueryGetPendingAccountRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetPendingAccountRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetPendingAccountRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetPendingAccountRequest { + return { address: isSet(object.address) ? String(object.address) : "" }; + }, + + toJSON(message: QueryGetPendingAccountRequest): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetPendingAccountRequest { + const message = createBaseQueryGetPendingAccountRequest(); + message.address = object.address ?? 
""; + return message; + }, +}; + +function createBaseQueryGetPendingAccountResponse(): QueryGetPendingAccountResponse { + return { pendingAccount: undefined }; +} + +export const QueryGetPendingAccountResponse = { + encode(message: QueryGetPendingAccountResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pendingAccount !== undefined) { + PendingAccount.encode(message.pendingAccount, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetPendingAccountResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetPendingAccountResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pendingAccount = PendingAccount.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetPendingAccountResponse { + return { + pendingAccount: isSet(object.pendingAccount) ? PendingAccount.fromJSON(object.pendingAccount) : undefined, + }; + }, + + toJSON(message: QueryGetPendingAccountResponse): unknown { + const obj: any = {}; + message.pendingAccount !== undefined + && (obj.pendingAccount = message.pendingAccount ? PendingAccount.toJSON(message.pendingAccount) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetPendingAccountResponse { + const message = createBaseQueryGetPendingAccountResponse(); + message.pendingAccount = (object.pendingAccount !== undefined && object.pendingAccount !== null) + ? 
PendingAccount.fromPartial(object.pendingAccount) + : undefined; + return message; + }, +}; + +function createBaseQueryAllPendingAccountRequest(): QueryAllPendingAccountRequest { + return { pagination: undefined }; +} + +export const QueryAllPendingAccountRequest = { + encode(message: QueryAllPendingAccountRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllPendingAccountRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllPendingAccountRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllPendingAccountRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllPendingAccountRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllPendingAccountRequest { + const message = createBaseQueryAllPendingAccountRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? 
PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllPendingAccountResponse(): QueryAllPendingAccountResponse { + return { pendingAccount: [], pagination: undefined }; +} + +export const QueryAllPendingAccountResponse = { + encode(message: QueryAllPendingAccountResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.pendingAccount) { + PendingAccount.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllPendingAccountResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllPendingAccountResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pendingAccount.push(PendingAccount.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllPendingAccountResponse { + return { + pendingAccount: Array.isArray(object?.pendingAccount) + ? object.pendingAccount.map((e: any) => PendingAccount.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllPendingAccountResponse): unknown { + const obj: any = {}; + if (message.pendingAccount) { + obj.pendingAccount = message.pendingAccount.map((e) => e ? PendingAccount.toJSON(e) : undefined); + } else { + obj.pendingAccount = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? 
PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllPendingAccountResponse { + const message = createBaseQueryAllPendingAccountResponse(); + message.pendingAccount = object.pendingAccount?.map((e) => PendingAccount.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetPendingAccountRevocationRequest(): QueryGetPendingAccountRevocationRequest { + return { address: "" }; +} + +export const QueryGetPendingAccountRevocationRequest = { + encode(message: QueryGetPendingAccountRevocationRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetPendingAccountRevocationRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetPendingAccountRevocationRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetPendingAccountRevocationRequest { + return { address: isSet(object.address) ? String(object.address) : "" }; + }, + + toJSON(message: QueryGetPendingAccountRevocationRequest): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetPendingAccountRevocationRequest { + const message = createBaseQueryGetPendingAccountRevocationRequest(); + message.address = object.address ?? 
""; + return message; + }, +}; + +function createBaseQueryGetPendingAccountRevocationResponse(): QueryGetPendingAccountRevocationResponse { + return { pendingAccountRevocation: undefined }; +} + +export const QueryGetPendingAccountRevocationResponse = { + encode(message: QueryGetPendingAccountRevocationResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pendingAccountRevocation !== undefined) { + PendingAccountRevocation.encode(message.pendingAccountRevocation, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetPendingAccountRevocationResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetPendingAccountRevocationResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pendingAccountRevocation = PendingAccountRevocation.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetPendingAccountRevocationResponse { + return { + pendingAccountRevocation: isSet(object.pendingAccountRevocation) + ? PendingAccountRevocation.fromJSON(object.pendingAccountRevocation) + : undefined, + }; + }, + + toJSON(message: QueryGetPendingAccountRevocationResponse): unknown { + const obj: any = {}; + message.pendingAccountRevocation !== undefined && (obj.pendingAccountRevocation = message.pendingAccountRevocation + ? 
PendingAccountRevocation.toJSON(message.pendingAccountRevocation) + : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetPendingAccountRevocationResponse { + const message = createBaseQueryGetPendingAccountRevocationResponse(); + message.pendingAccountRevocation = + (object.pendingAccountRevocation !== undefined && object.pendingAccountRevocation !== null) + ? PendingAccountRevocation.fromPartial(object.pendingAccountRevocation) + : undefined; + return message; + }, +}; + +function createBaseQueryAllPendingAccountRevocationRequest(): QueryAllPendingAccountRevocationRequest { + return { pagination: undefined }; +} + +export const QueryAllPendingAccountRevocationRequest = { + encode(message: QueryAllPendingAccountRevocationRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllPendingAccountRevocationRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllPendingAccountRevocationRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllPendingAccountRevocationRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllPendingAccountRevocationRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? 
PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllPendingAccountRevocationRequest { + const message = createBaseQueryAllPendingAccountRevocationRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllPendingAccountRevocationResponse(): QueryAllPendingAccountRevocationResponse { + return { pendingAccountRevocation: [], pagination: undefined }; +} + +export const QueryAllPendingAccountRevocationResponse = { + encode(message: QueryAllPendingAccountRevocationResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.pendingAccountRevocation) { + PendingAccountRevocation.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllPendingAccountRevocationResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllPendingAccountRevocationResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pendingAccountRevocation.push(PendingAccountRevocation.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllPendingAccountRevocationResponse { + return { + pendingAccountRevocation: Array.isArray(object?.pendingAccountRevocation) + ? 
object.pendingAccountRevocation.map((e: any) => PendingAccountRevocation.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllPendingAccountRevocationResponse): unknown { + const obj: any = {}; + if (message.pendingAccountRevocation) { + obj.pendingAccountRevocation = message.pendingAccountRevocation.map((e) => + e ? PendingAccountRevocation.toJSON(e) : undefined + ); + } else { + obj.pendingAccountRevocation = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllPendingAccountRevocationResponse { + const message = createBaseQueryAllPendingAccountRevocationResponse(); + message.pendingAccountRevocation = + object.pendingAccountRevocation?.map((e) => PendingAccountRevocation.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetAccountStatRequest(): QueryGetAccountStatRequest { + return {}; +} + +export const QueryGetAccountStatRequest = { + encode(_: QueryGetAccountStatRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetAccountStatRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGetAccountStatRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): QueryGetAccountStatRequest { + return {}; + }, + + toJSON(_: QueryGetAccountStatRequest): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): QueryGetAccountStatRequest { + const message = createBaseQueryGetAccountStatRequest(); + return message; + }, +}; + +function createBaseQueryGetAccountStatResponse(): QueryGetAccountStatResponse { + return { AccountStat: undefined }; +} + +export const QueryGetAccountStatResponse = { + encode(message: QueryGetAccountStatResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.AccountStat !== undefined) { + AccountStat.encode(message.AccountStat, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetAccountStatResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetAccountStatResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.AccountStat = AccountStat.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetAccountStatResponse { + return { AccountStat: isSet(object.AccountStat) ? AccountStat.fromJSON(object.AccountStat) : undefined }; + }, + + toJSON(message: QueryGetAccountStatResponse): unknown { + const obj: any = {}; + message.AccountStat !== undefined + && (obj.AccountStat = message.AccountStat ? 
AccountStat.toJSON(message.AccountStat) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryGetAccountStatResponse { + const message = createBaseQueryGetAccountStatResponse(); + message.AccountStat = (object.AccountStat !== undefined && object.AccountStat !== null) + ? AccountStat.fromPartial(object.AccountStat) + : undefined; + return message; + }, +}; + +function createBaseQueryGetRevokedAccountRequest(): QueryGetRevokedAccountRequest { + return { address: "" }; +} + +export const QueryGetRevokedAccountRequest = { + encode(message: QueryGetRevokedAccountRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetRevokedAccountRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetRevokedAccountRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetRevokedAccountRequest { + return { address: isSet(object.address) ? String(object.address) : "" }; + }, + + toJSON(message: QueryGetRevokedAccountRequest): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetRevokedAccountRequest { + const message = createBaseQueryGetRevokedAccountRequest(); + message.address = object.address ?? 
""; + return message; + }, +}; + +function createBaseQueryGetRevokedAccountResponse(): QueryGetRevokedAccountResponse { + return { revokedAccount: undefined }; +} + +export const QueryGetRevokedAccountResponse = { + encode(message: QueryGetRevokedAccountResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.revokedAccount !== undefined) { + RevokedAccount.encode(message.revokedAccount, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetRevokedAccountResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetRevokedAccountResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.revokedAccount = RevokedAccount.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetRevokedAccountResponse { + return { + revokedAccount: isSet(object.revokedAccount) ? RevokedAccount.fromJSON(object.revokedAccount) : undefined, + }; + }, + + toJSON(message: QueryGetRevokedAccountResponse): unknown { + const obj: any = {}; + message.revokedAccount !== undefined + && (obj.revokedAccount = message.revokedAccount ? RevokedAccount.toJSON(message.revokedAccount) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetRevokedAccountResponse { + const message = createBaseQueryGetRevokedAccountResponse(); + message.revokedAccount = (object.revokedAccount !== undefined && object.revokedAccount !== null) + ? 
RevokedAccount.fromPartial(object.revokedAccount) + : undefined; + return message; + }, +}; + +function createBaseQueryAllRevokedAccountRequest(): QueryAllRevokedAccountRequest { + return { pagination: undefined }; +} + +export const QueryAllRevokedAccountRequest = { + encode(message: QueryAllRevokedAccountRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllRevokedAccountRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllRevokedAccountRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllRevokedAccountRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllRevokedAccountRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllRevokedAccountRequest { + const message = createBaseQueryAllRevokedAccountRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? 
PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllRevokedAccountResponse(): QueryAllRevokedAccountResponse { + return { revokedAccount: [], pagination: undefined }; +} + +export const QueryAllRevokedAccountResponse = { + encode(message: QueryAllRevokedAccountResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.revokedAccount) { + RevokedAccount.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllRevokedAccountResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllRevokedAccountResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.revokedAccount.push(RevokedAccount.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllRevokedAccountResponse { + return { + revokedAccount: Array.isArray(object?.revokedAccount) + ? object.revokedAccount.map((e: any) => RevokedAccount.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllRevokedAccountResponse): unknown { + const obj: any = {}; + if (message.revokedAccount) { + obj.revokedAccount = message.revokedAccount.map((e) => e ? RevokedAccount.toJSON(e) : undefined); + } else { + obj.revokedAccount = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? 
PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllRevokedAccountResponse { + const message = createBaseQueryAllRevokedAccountResponse(); + message.revokedAccount = object.revokedAccount?.map((e) => RevokedAccount.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetRejectedAccountRequest(): QueryGetRejectedAccountRequest { + return { address: "" }; +} + +export const QueryGetRejectedAccountRequest = { + encode(message: QueryGetRejectedAccountRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetRejectedAccountRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetRejectedAccountRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetRejectedAccountRequest { + return { address: isSet(object.address) ? String(object.address) : "" }; + }, + + toJSON(message: QueryGetRejectedAccountRequest): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetRejectedAccountRequest { + const message = createBaseQueryGetRejectedAccountRequest(); + message.address = object.address ?? 
""; + return message; + }, +}; + +function createBaseQueryGetRejectedAccountResponse(): QueryGetRejectedAccountResponse { + return { rejectedAccount: undefined }; +} + +export const QueryGetRejectedAccountResponse = { + encode(message: QueryGetRejectedAccountResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.rejectedAccount !== undefined) { + RejectedAccount.encode(message.rejectedAccount, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetRejectedAccountResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetRejectedAccountResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rejectedAccount = RejectedAccount.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetRejectedAccountResponse { + return { + rejectedAccount: isSet(object.rejectedAccount) ? RejectedAccount.fromJSON(object.rejectedAccount) : undefined, + }; + }, + + toJSON(message: QueryGetRejectedAccountResponse): unknown { + const obj: any = {}; + message.rejectedAccount !== undefined + && (obj.rejectedAccount = message.rejectedAccount ? RejectedAccount.toJSON(message.rejectedAccount) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetRejectedAccountResponse { + const message = createBaseQueryGetRejectedAccountResponse(); + message.rejectedAccount = (object.rejectedAccount !== undefined && object.rejectedAccount !== null) + ? 
RejectedAccount.fromPartial(object.rejectedAccount) + : undefined; + return message; + }, +}; + +function createBaseQueryAllRejectedAccountRequest(): QueryAllRejectedAccountRequest { + return { pagination: undefined }; +} + +export const QueryAllRejectedAccountRequest = { + encode(message: QueryAllRejectedAccountRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllRejectedAccountRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllRejectedAccountRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllRejectedAccountRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllRejectedAccountRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllRejectedAccountRequest { + const message = createBaseQueryAllRejectedAccountRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? 
PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllRejectedAccountResponse(): QueryAllRejectedAccountResponse { + return { rejectedAccount: [], pagination: undefined }; +} + +export const QueryAllRejectedAccountResponse = { + encode(message: QueryAllRejectedAccountResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rejectedAccount) { + RejectedAccount.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllRejectedAccountResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllRejectedAccountResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rejectedAccount.push(RejectedAccount.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllRejectedAccountResponse { + return { + rejectedAccount: Array.isArray(object?.rejectedAccount) + ? object.rejectedAccount.map((e: any) => RejectedAccount.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllRejectedAccountResponse): unknown { + const obj: any = {}; + if (message.rejectedAccount) { + obj.rejectedAccount = message.rejectedAccount.map((e) => e ? RejectedAccount.toJSON(e) : undefined); + } else { + obj.rejectedAccount = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? 
PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllRejectedAccountResponse { + const message = createBaseQueryAllRejectedAccountResponse(); + message.rejectedAccount = object.rejectedAccount?.map((e) => RejectedAccount.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +/** Query defines the gRPC querier service. */ +export interface Query { + /** Queries a account by index. */ + Account(request: QueryGetAccountRequest): Promise; + /** Queries a list of account items. */ + AccountAll(request: QueryAllAccountRequest): Promise; + /** Queries a pendingAccount by index. */ + PendingAccount(request: QueryGetPendingAccountRequest): Promise; + /** Queries a list of pendingAccount items. */ + PendingAccountAll(request: QueryAllPendingAccountRequest): Promise; + /** Queries a pendingAccountRevocation by index. */ + PendingAccountRevocation( + request: QueryGetPendingAccountRevocationRequest, + ): Promise; + /** Queries a list of pendingAccountRevocation items. */ + PendingAccountRevocationAll( + request: QueryAllPendingAccountRevocationRequest, + ): Promise; + /** Queries a accountStat by index. */ + AccountStat(request: QueryGetAccountStatRequest): Promise; + /** Queries a RevokedAccount by index. */ + RevokedAccount(request: QueryGetRevokedAccountRequest): Promise; + /** Queries a list of RevokedAccount items. */ + RevokedAccountAll(request: QueryAllRevokedAccountRequest): Promise; + /** Queries a RejectedAccount by index. */ + RejectedAccount(request: QueryGetRejectedAccountRequest): Promise; + /** Queries a list of RejectedAccount items. 
*/ + RejectedAccountAll(request: QueryAllRejectedAccountRequest): Promise; +} + +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.Account = this.Account.bind(this); + this.AccountAll = this.AccountAll.bind(this); + this.PendingAccount = this.PendingAccount.bind(this); + this.PendingAccountAll = this.PendingAccountAll.bind(this); + this.PendingAccountRevocation = this.PendingAccountRevocation.bind(this); + this.PendingAccountRevocationAll = this.PendingAccountRevocationAll.bind(this); + this.AccountStat = this.AccountStat.bind(this); + this.RevokedAccount = this.RevokedAccount.bind(this); + this.RevokedAccountAll = this.RevokedAccountAll.bind(this); + this.RejectedAccount = this.RejectedAccount.bind(this); + this.RejectedAccountAll = this.RejectedAccountAll.bind(this); + } + Account(request: QueryGetAccountRequest): Promise { + const data = QueryGetAccountRequest.encode(request).finish(); + const promise = this.rpc.request("zigbeealliance.distributedcomplianceledger.dclauth.Query", "Account", data); + return promise.then((data) => QueryGetAccountResponse.decode(new _m0.Reader(data))); + } + + AccountAll(request: QueryAllAccountRequest): Promise { + const data = QueryAllAccountRequest.encode(request).finish(); + const promise = this.rpc.request("zigbeealliance.distributedcomplianceledger.dclauth.Query", "AccountAll", data); + return promise.then((data) => QueryAllAccountResponse.decode(new _m0.Reader(data))); + } + + PendingAccount(request: QueryGetPendingAccountRequest): Promise { + const data = QueryGetPendingAccountRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.dclauth.Query", + "PendingAccount", + data, + ); + return promise.then((data) => QueryGetPendingAccountResponse.decode(new _m0.Reader(data))); + } + + PendingAccountAll(request: QueryAllPendingAccountRequest): Promise { + const data = 
QueryAllPendingAccountRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.dclauth.Query", + "PendingAccountAll", + data, + ); + return promise.then((data) => QueryAllPendingAccountResponse.decode(new _m0.Reader(data))); + } + + PendingAccountRevocation( + request: QueryGetPendingAccountRevocationRequest, + ): Promise { + const data = QueryGetPendingAccountRevocationRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.dclauth.Query", + "PendingAccountRevocation", + data, + ); + return promise.then((data) => QueryGetPendingAccountRevocationResponse.decode(new _m0.Reader(data))); + } + + PendingAccountRevocationAll( + request: QueryAllPendingAccountRevocationRequest, + ): Promise { + const data = QueryAllPendingAccountRevocationRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.dclauth.Query", + "PendingAccountRevocationAll", + data, + ); + return promise.then((data) => QueryAllPendingAccountRevocationResponse.decode(new _m0.Reader(data))); + } + + AccountStat(request: QueryGetAccountStatRequest): Promise { + const data = QueryGetAccountStatRequest.encode(request).finish(); + const promise = this.rpc.request("zigbeealliance.distributedcomplianceledger.dclauth.Query", "AccountStat", data); + return promise.then((data) => QueryGetAccountStatResponse.decode(new _m0.Reader(data))); + } + + RevokedAccount(request: QueryGetRevokedAccountRequest): Promise { + const data = QueryGetRevokedAccountRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.dclauth.Query", + "RevokedAccount", + data, + ); + return promise.then((data) => QueryGetRevokedAccountResponse.decode(new _m0.Reader(data))); + } + + RevokedAccountAll(request: QueryAllRevokedAccountRequest): Promise { + const data = 
QueryAllRevokedAccountRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.dclauth.Query", + "RevokedAccountAll", + data, + ); + return promise.then((data) => QueryAllRevokedAccountResponse.decode(new _m0.Reader(data))); + } + + RejectedAccount(request: QueryGetRejectedAccountRequest): Promise { + const data = QueryGetRejectedAccountRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.dclauth.Query", + "RejectedAccount", + data, + ); + return promise.then((data) => QueryGetRejectedAccountResponse.decode(new _m0.Reader(data))); + } + + RejectedAccountAll(request: QueryAllRejectedAccountRequest): Promise { + const data = QueryAllRejectedAccountRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.dclauth.Query", + "RejectedAccountAll", + data, + ); + return promise.then((data) => QueryAllRejectedAccountResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/rejected_account.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/rejected_account.ts new file mode 100644 index 000000000..a0a27a697 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/rejected_account.ts @@ -0,0 +1,73 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Account } from "./account"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.dclauth"; + +export interface RejectedAccount { + account: Account | undefined; +} + +function createBaseRejectedAccount(): RejectedAccount { + return { account: undefined }; +} + +export const RejectedAccount = { + encode(message: RejectedAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.account !== undefined) { + Account.encode(message.account, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RejectedAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRejectedAccount(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.account = Account.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): RejectedAccount { + return { account: isSet(object.account) ? 
Account.fromJSON(object.account) : undefined }; + }, + + toJSON(message: RejectedAccount): unknown { + const obj: any = {}; + message.account !== undefined && (obj.account = message.account ? Account.toJSON(message.account) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): RejectedAccount { + const message = createBaseRejectedAccount(); + message.account = (object.account !== undefined && object.account !== null) + ? Account.fromPartial(object.account) + : undefined; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/revoked_account.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/revoked_account.ts new file mode 100644 index 000000000..13060d136 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/revoked_account.ts @@ -0,0 +1,135 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Account } from "./account"; +import { Grant } from "./grant"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.dclauth"; + +export interface RevokedAccount { + account: Account | undefined; + revokeApprovals: Grant[]; + reason: RevokedAccount_Reason; +} + +export enum RevokedAccount_Reason { + TrusteeVoting = 0, + MaliciousValidator = 1, + 
UNRECOGNIZED = -1, +} + +export function revokedAccount_ReasonFromJSON(object: any): RevokedAccount_Reason { + switch (object) { + case 0: + case "TrusteeVoting": + return RevokedAccount_Reason.TrusteeVoting; + case 1: + case "MaliciousValidator": + return RevokedAccount_Reason.MaliciousValidator; + case -1: + case "UNRECOGNIZED": + default: + return RevokedAccount_Reason.UNRECOGNIZED; + } +} + +export function revokedAccount_ReasonToJSON(object: RevokedAccount_Reason): string { + switch (object) { + case RevokedAccount_Reason.TrusteeVoting: + return "TrusteeVoting"; + case RevokedAccount_Reason.MaliciousValidator: + return "MaliciousValidator"; + case RevokedAccount_Reason.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +function createBaseRevokedAccount(): RevokedAccount { + return { account: undefined, revokeApprovals: [], reason: 0 }; +} + +export const RevokedAccount = { + encode(message: RevokedAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.account !== undefined) { + Account.encode(message.account, writer.uint32(10).fork()).ldelim(); + } + for (const v of message.revokeApprovals) { + Grant.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.reason !== 0) { + writer.uint32(24).int32(message.reason); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RevokedAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseRevokedAccount(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.account = Account.decode(reader, reader.uint32()); + break; + case 2: + message.revokeApprovals.push(Grant.decode(reader, reader.uint32())); + break; + case 3: + message.reason = reader.int32() as any; + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): RevokedAccount { + return { + account: isSet(object.account) ? Account.fromJSON(object.account) : undefined, + revokeApprovals: Array.isArray(object?.revokeApprovals) + ? object.revokeApprovals.map((e: any) => Grant.fromJSON(e)) + : [], + reason: isSet(object.reason) ? revokedAccount_ReasonFromJSON(object.reason) : 0, + }; + }, + + toJSON(message: RevokedAccount): unknown { + const obj: any = {}; + message.account !== undefined && (obj.account = message.account ? Account.toJSON(message.account) : undefined); + if (message.revokeApprovals) { + obj.revokeApprovals = message.revokeApprovals.map((e) => e ? Grant.toJSON(e) : undefined); + } else { + obj.revokeApprovals = []; + } + message.reason !== undefined && (obj.reason = revokedAccount_ReasonToJSON(message.reason)); + return obj; + }, + + fromPartial, I>>(object: I): RevokedAccount { + const message = createBaseRevokedAccount(); + message.account = (object.account !== undefined && object.account !== null) + ? Account.fromPartial(object.account) + : undefined; + message.revokeApprovals = object.revokeApprovals?.map((e) => Grant.fromPartial(e)) || []; + message.reason = object.reason ?? 0; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? 
keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/tx.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/tx.ts new file mode 100644 index 000000000..c8cc4cc27 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclauth/types/zigbeealliance/distributedcomplianceledger/dclauth/tx.ts @@ -0,0 +1,803 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { Any } from "../../../google/protobuf/any"; +import { Uint16Range } from "../common/uint16_range"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.dclauth"; + +export interface MsgProposeAddAccount { + signer: string; + address: string; + pubKey: Any | undefined; + roles: string[]; + vendorID: number; + info: string; + time: number; + productIDs: Uint16Range[]; +} + +export interface MsgProposeAddAccountResponse { +} + +export interface MsgApproveAddAccount { + signer: string; + address: string; + info: string; + time: number; +} + +export interface MsgApproveAddAccountResponse { +} + +export interface MsgProposeRevokeAccount { + signer: string; + address: string; + info: string; + time: number; +} + +export interface MsgProposeRevokeAccountResponse { +} + +export interface MsgApproveRevokeAccount { + signer: string; + address: string; + info: string; + time: number; +} + +export interface MsgApproveRevokeAccountResponse { +} + +export interface MsgRejectAddAccount { + signer: string; + address: string; + info: string; + time: number; +} + +export interface MsgRejectAddAccountResponse { +} + +function createBaseMsgProposeAddAccount(): MsgProposeAddAccount { + return 
{ signer: "", address: "", pubKey: undefined, roles: [], vendorID: 0, info: "", time: 0, productIDs: [] }; +} + +export const MsgProposeAddAccount = { + encode(message: MsgProposeAddAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.address !== "") { + writer.uint32(18).string(message.address); + } + if (message.pubKey !== undefined) { + Any.encode(message.pubKey, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.roles) { + writer.uint32(34).string(v!); + } + if (message.vendorID !== 0) { + writer.uint32(40).int32(message.vendorID); + } + if (message.info !== "") { + writer.uint32(50).string(message.info); + } + if (message.time !== 0) { + writer.uint32(56).int64(message.time); + } + for (const v of message.productIDs) { + Uint16Range.encode(v!, writer.uint32(66).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgProposeAddAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgProposeAddAccount(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.address = reader.string(); + break; + case 3: + message.pubKey = Any.decode(reader, reader.uint32()); + break; + case 4: + message.roles.push(reader.string()); + break; + case 5: + message.vendorID = reader.int32(); + break; + case 6: + message.info = reader.string(); + break; + case 7: + message.time = longToNumber(reader.int64() as Long); + break; + case 8: + message.productIDs.push(Uint16Range.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgProposeAddAccount { + return { + signer: isSet(object.signer) ? 
String(object.signer) : "", + address: isSet(object.address) ? String(object.address) : "", + pubKey: isSet(object.pubKey) ? Any.fromJSON(object.pubKey) : undefined, + roles: Array.isArray(object?.roles) ? object.roles.map((e: any) => String(e)) : [], + vendorID: isSet(object.vendorID) ? Number(object.vendorID) : 0, + info: isSet(object.info) ? String(object.info) : "", + time: isSet(object.time) ? Number(object.time) : 0, + productIDs: Array.isArray(object?.productIDs) ? object.productIDs.map((e: any) => Uint16Range.fromJSON(e)) : [], + }; + }, + + toJSON(message: MsgProposeAddAccount): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.address !== undefined && (obj.address = message.address); + message.pubKey !== undefined && (obj.pubKey = message.pubKey ? Any.toJSON(message.pubKey) : undefined); + if (message.roles) { + obj.roles = message.roles.map((e) => e); + } else { + obj.roles = []; + } + message.vendorID !== undefined && (obj.vendorID = Math.round(message.vendorID)); + message.info !== undefined && (obj.info = message.info); + message.time !== undefined && (obj.time = Math.round(message.time)); + if (message.productIDs) { + obj.productIDs = message.productIDs.map((e) => e ? Uint16Range.toJSON(e) : undefined); + } else { + obj.productIDs = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MsgProposeAddAccount { + const message = createBaseMsgProposeAddAccount(); + message.signer = object.signer ?? ""; + message.address = object.address ?? ""; + message.pubKey = (object.pubKey !== undefined && object.pubKey !== null) + ? Any.fromPartial(object.pubKey) + : undefined; + message.roles = object.roles?.map((e) => e) || []; + message.vendorID = object.vendorID ?? 0; + message.info = object.info ?? ""; + message.time = object.time ?? 
0; + message.productIDs = object.productIDs?.map((e) => Uint16Range.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMsgProposeAddAccountResponse(): MsgProposeAddAccountResponse { + return {}; +} + +export const MsgProposeAddAccountResponse = { + encode(_: MsgProposeAddAccountResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgProposeAddAccountResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgProposeAddAccountResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgProposeAddAccountResponse { + return {}; + }, + + toJSON(_: MsgProposeAddAccountResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgProposeAddAccountResponse { + const message = createBaseMsgProposeAddAccountResponse(); + return message; + }, +}; + +function createBaseMsgApproveAddAccount(): MsgApproveAddAccount { + return { signer: "", address: "", info: "", time: 0 }; +} + +export const MsgApproveAddAccount = { + encode(message: MsgApproveAddAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.address !== "") { + writer.uint32(18).string(message.address); + } + if (message.info !== "") { + writer.uint32(26).string(message.info); + } + if (message.time !== 0) { + writer.uint32(32).int64(message.time); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgApproveAddAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgApproveAddAccount(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.address = reader.string(); + break; + case 3: + message.info = reader.string(); + break; + case 4: + message.time = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgApproveAddAccount { + return { + signer: isSet(object.signer) ? String(object.signer) : "", + address: isSet(object.address) ? String(object.address) : "", + info: isSet(object.info) ? String(object.info) : "", + time: isSet(object.time) ? Number(object.time) : 0, + }; + }, + + toJSON(message: MsgApproveAddAccount): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.address !== undefined && (obj.address = message.address); + message.info !== undefined && (obj.info = message.info); + message.time !== undefined && (obj.time = Math.round(message.time)); + return obj; + }, + + fromPartial, I>>(object: I): MsgApproveAddAccount { + const message = createBaseMsgApproveAddAccount(); + message.signer = object.signer ?? ""; + message.address = object.address ?? ""; + message.info = object.info ?? ""; + message.time = object.time ?? 0; + return message; + }, +}; + +function createBaseMsgApproveAddAccountResponse(): MsgApproveAddAccountResponse { + return {}; +} + +export const MsgApproveAddAccountResponse = { + encode(_: MsgApproveAddAccountResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgApproveAddAccountResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgApproveAddAccountResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgApproveAddAccountResponse { + return {}; + }, + + toJSON(_: MsgApproveAddAccountResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgApproveAddAccountResponse { + const message = createBaseMsgApproveAddAccountResponse(); + return message; + }, +}; + +function createBaseMsgProposeRevokeAccount(): MsgProposeRevokeAccount { + return { signer: "", address: "", info: "", time: 0 }; +} + +export const MsgProposeRevokeAccount = { + encode(message: MsgProposeRevokeAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.address !== "") { + writer.uint32(18).string(message.address); + } + if (message.info !== "") { + writer.uint32(26).string(message.info); + } + if (message.time !== 0) { + writer.uint32(32).int64(message.time); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgProposeRevokeAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgProposeRevokeAccount(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.address = reader.string(); + break; + case 3: + message.info = reader.string(); + break; + case 4: + message.time = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgProposeRevokeAccount { + return { + signer: isSet(object.signer) ? 
String(object.signer) : "", + address: isSet(object.address) ? String(object.address) : "", + info: isSet(object.info) ? String(object.info) : "", + time: isSet(object.time) ? Number(object.time) : 0, + }; + }, + + toJSON(message: MsgProposeRevokeAccount): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.address !== undefined && (obj.address = message.address); + message.info !== undefined && (obj.info = message.info); + message.time !== undefined && (obj.time = Math.round(message.time)); + return obj; + }, + + fromPartial, I>>(object: I): MsgProposeRevokeAccount { + const message = createBaseMsgProposeRevokeAccount(); + message.signer = object.signer ?? ""; + message.address = object.address ?? ""; + message.info = object.info ?? ""; + message.time = object.time ?? 0; + return message; + }, +}; + +function createBaseMsgProposeRevokeAccountResponse(): MsgProposeRevokeAccountResponse { + return {}; +} + +export const MsgProposeRevokeAccountResponse = { + encode(_: MsgProposeRevokeAccountResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgProposeRevokeAccountResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgProposeRevokeAccountResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgProposeRevokeAccountResponse { + return {}; + }, + + toJSON(_: MsgProposeRevokeAccountResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgProposeRevokeAccountResponse { + const message = createBaseMsgProposeRevokeAccountResponse(); + return message; + }, +}; + +function createBaseMsgApproveRevokeAccount(): MsgApproveRevokeAccount { + return { signer: "", address: "", info: "", time: 0 }; +} + +export const MsgApproveRevokeAccount = { + encode(message: MsgApproveRevokeAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.address !== "") { + writer.uint32(18).string(message.address); + } + if (message.info !== "") { + writer.uint32(26).string(message.info); + } + if (message.time !== 0) { + writer.uint32(32).int64(message.time); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgApproveRevokeAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgApproveRevokeAccount(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.address = reader.string(); + break; + case 3: + message.info = reader.string(); + break; + case 4: + message.time = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgApproveRevokeAccount { + return { + signer: isSet(object.signer) ? 
String(object.signer) : "", + address: isSet(object.address) ? String(object.address) : "", + info: isSet(object.info) ? String(object.info) : "", + time: isSet(object.time) ? Number(object.time) : 0, + }; + }, + + toJSON(message: MsgApproveRevokeAccount): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.address !== undefined && (obj.address = message.address); + message.info !== undefined && (obj.info = message.info); + message.time !== undefined && (obj.time = Math.round(message.time)); + return obj; + }, + + fromPartial, I>>(object: I): MsgApproveRevokeAccount { + const message = createBaseMsgApproveRevokeAccount(); + message.signer = object.signer ?? ""; + message.address = object.address ?? ""; + message.info = object.info ?? ""; + message.time = object.time ?? 0; + return message; + }, +}; + +function createBaseMsgApproveRevokeAccountResponse(): MsgApproveRevokeAccountResponse { + return {}; +} + +export const MsgApproveRevokeAccountResponse = { + encode(_: MsgApproveRevokeAccountResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgApproveRevokeAccountResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgApproveRevokeAccountResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgApproveRevokeAccountResponse { + return {}; + }, + + toJSON(_: MsgApproveRevokeAccountResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgApproveRevokeAccountResponse { + const message = createBaseMsgApproveRevokeAccountResponse(); + return message; + }, +}; + +function createBaseMsgRejectAddAccount(): MsgRejectAddAccount { + return { signer: "", address: "", info: "", time: 0 }; +} + +export const MsgRejectAddAccount = { + encode(message: MsgRejectAddAccount, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.address !== "") { + writer.uint32(18).string(message.address); + } + if (message.info !== "") { + writer.uint32(26).string(message.info); + } + if (message.time !== 0) { + writer.uint32(32).int64(message.time); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRejectAddAccount { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgRejectAddAccount(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.address = reader.string(); + break; + case 3: + message.info = reader.string(); + break; + case 4: + message.time = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgRejectAddAccount { + return { + signer: isSet(object.signer) ? 
String(object.signer) : "", + address: isSet(object.address) ? String(object.address) : "", + info: isSet(object.info) ? String(object.info) : "", + time: isSet(object.time) ? Number(object.time) : 0, + }; + }, + + toJSON(message: MsgRejectAddAccount): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.address !== undefined && (obj.address = message.address); + message.info !== undefined && (obj.info = message.info); + message.time !== undefined && (obj.time = Math.round(message.time)); + return obj; + }, + + fromPartial, I>>(object: I): MsgRejectAddAccount { + const message = createBaseMsgRejectAddAccount(); + message.signer = object.signer ?? ""; + message.address = object.address ?? ""; + message.info = object.info ?? ""; + message.time = object.time ?? 0; + return message; + }, +}; + +function createBaseMsgRejectAddAccountResponse(): MsgRejectAddAccountResponse { + return {}; +} + +export const MsgRejectAddAccountResponse = { + encode(_: MsgRejectAddAccountResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRejectAddAccountResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgRejectAddAccountResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgRejectAddAccountResponse { + return {}; + }, + + toJSON(_: MsgRejectAddAccountResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgRejectAddAccountResponse { + const message = createBaseMsgRejectAddAccountResponse(); + return message; + }, +}; + +/** Msg defines the Msg service. 
*/ +export interface Msg { + ProposeAddAccount(request: MsgProposeAddAccount): Promise; + ApproveAddAccount(request: MsgApproveAddAccount): Promise; + ProposeRevokeAccount(request: MsgProposeRevokeAccount): Promise; + ApproveRevokeAccount(request: MsgApproveRevokeAccount): Promise; + /** this line is used by starport scaffolding # proto/tx/rpc */ + RejectAddAccount(request: MsgRejectAddAccount): Promise; +} + +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.ProposeAddAccount = this.ProposeAddAccount.bind(this); + this.ApproveAddAccount = this.ApproveAddAccount.bind(this); + this.ProposeRevokeAccount = this.ProposeRevokeAccount.bind(this); + this.ApproveRevokeAccount = this.ApproveRevokeAccount.bind(this); + this.RejectAddAccount = this.RejectAddAccount.bind(this); + } + ProposeAddAccount(request: MsgProposeAddAccount): Promise { + const data = MsgProposeAddAccount.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.dclauth.Msg", + "ProposeAddAccount", + data, + ); + return promise.then((data) => MsgProposeAddAccountResponse.decode(new _m0.Reader(data))); + } + + ApproveAddAccount(request: MsgApproveAddAccount): Promise { + const data = MsgApproveAddAccount.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.dclauth.Msg", + "ApproveAddAccount", + data, + ); + return promise.then((data) => MsgApproveAddAccountResponse.decode(new _m0.Reader(data))); + } + + ProposeRevokeAccount(request: MsgProposeRevokeAccount): Promise { + const data = MsgProposeRevokeAccount.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.dclauth.Msg", + "ProposeRevokeAccount", + data, + ); + return promise.then((data) => MsgProposeRevokeAccountResponse.decode(new _m0.Reader(data))); + } + + ApproveRevokeAccount(request: MsgApproveRevokeAccount): Promise { 
+ const data = MsgApproveRevokeAccount.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.dclauth.Msg", + "ApproveRevokeAccount", + data, + ); + return promise.then((data) => MsgApproveRevokeAccountResponse.decode(new _m0.Reader(data))); + } + + RejectAddAccount(request: MsgRejectAddAccount): Promise { + const data = MsgRejectAddAccount.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.dclauth.Msg", + "RejectAddAccount", + data, + ); + return promise.then((data) => MsgRejectAddAccountResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/api.swagger.yml b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/api.swagger.yml new file mode 100644 index 000000000..f9dff1688 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/api.swagger.yml @@ -0,0 +1,28 @@ +swagger: '2.0' +info: + title: HTTP API Console zigbeealliance.distributedcomplianceledger.dclgenutil + name: '' + description: '' +definitions: + Any: + type: object + properties: + '@type': + type: string + additionalProperties: {} + Status: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/index.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/index.ts new file mode 100755 index 000000000..c9dfa159e --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/index.ts @@ -0,0 +1,6 @@ +import Module from './module'; +import { txClient, queryClient, registry } from './module'; +import { msgTypes } from './registry'; + +export * from "./types"; +export { Module, msgTypes, txClient, queryClient, registry }; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/module.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/module.ts new file mode 100755 index 000000000..26f67752d --- 
/dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/module.ts @@ -0,0 +1,96 @@ +// Generated by Ignite ignite.com/cli + +import { StdFee } from "@cosmjs/launchpad"; +import { SigningStargateClient, DeliverTxResponse } from "@cosmjs/stargate"; +import { EncodeObject, GeneratedType, OfflineSigner, Registry } from "@cosmjs/proto-signing"; +import { msgTypes } from './registry'; +import { IgniteClient } from "../client" +import { MissingWalletError } from "../helpers" +import { Api } from "./rest"; + + +export { }; + + + +export const registry = new Registry(msgTypes); + +type Field = { + name: string; + type: unknown; +} +function getStructure(template) { + const structure: {fields: Field[]} = { fields: [] } + for (let [key, value] of Object.entries(template)) { + let field = { name: key, type: typeof value } + structure.fields.push(field) + } + return structure +} +const defaultFee = { + amount: [], + gas: "200000", +}; + +interface TxClientOptions { + addr: string + prefix: string + signer?: OfflineSigner +} + +export const txClient = ({ signer, prefix, addr }: TxClientOptions = { addr: "http://localhost:26657", prefix: "cosmos" }) => { + + return { + + + } +}; + +interface QueryClientOptions { + addr: string +} + +export const queryClient = ({ addr: addr }: QueryClientOptions = { addr: "http://localhost:1317" }) => { + return new Api({ baseURL: addr }); +}; + +class SDKModule { + public query: ReturnType; + public tx: ReturnType; + public structure: Record; + public registry: Array<[string, GeneratedType]> = []; + + constructor(client: IgniteClient) { + + this.query = queryClient({ addr: client.env.apiURL }); + this.updateTX(client); + this.structure = { + + }; + client.on('signer-changed',(signer) => { + this.updateTX(client); + }) + } + updateTX(client: IgniteClient) { + const methods = txClient({ + signer: client.signer, + addr: client.env.rpcURL, + prefix: client.env.prefix ?? 
"cosmos", + }) + + this.tx = methods; + for (let m in methods) { + this.tx[m] = methods[m].bind(this.tx); + } + } +}; + +const Module = (test: IgniteClient) => { + return { + module: { + ZigbeeallianceDistributedcomplianceledgerDclgenutil: new SDKModule(test) + }, + registry: msgTypes + } +} +export default Module; \ No newline at end of file diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/registry.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/registry.ts new file mode 100755 index 000000000..26157e4ff --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/registry.ts @@ -0,0 +1,7 @@ +import { GeneratedType } from "@cosmjs/proto-signing"; + +const msgTypes: Array<[string, GeneratedType]> = [ + +]; + +export { msgTypes } \ No newline at end of file diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/rest.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/rest.ts new file mode 100644 index 000000000..cf252e2d5 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/rest.ts @@ -0,0 +1,147 @@ +/* eslint-disable */ +/* tslint:disable */ +/* + * --------------------------------------------------------------- + * ## THIS FILE WAS GENERATED VIA SWAGGER-TYPESCRIPT-API ## + * ## ## + * ## AUTHOR: acacode ## + * ## SOURCE: https://github.com/acacode/swagger-typescript-api ## + * --------------------------------------------------------------- + */ + +export interface ProtobufAny { + "@type"?: string; +} + +export interface RpcStatus { + /** @format int32 */ + code?: number; + message?: string; + details?: ProtobufAny[]; +} + +import axios, { AxiosInstance, AxiosRequestConfig, AxiosResponse, ResponseType } from "axios"; + +export type QueryParamsType = Record; + +export interface FullRequestParams extends Omit { + /** set parameter to `true` for call `securityWorker` for this request */ + secure?: boolean; + /** request path */ 
+ path: string; + /** content type of request body */ + type?: ContentType; + /** query params */ + query?: QueryParamsType; + /** format of response (i.e. response.json() -> format: "json") */ + format?: ResponseType; + /** request body */ + body?: unknown; +} + +export type RequestParams = Omit; + +export interface ApiConfig extends Omit { + securityWorker?: ( + securityData: SecurityDataType | null, + ) => Promise | AxiosRequestConfig | void; + secure?: boolean; + format?: ResponseType; +} + +export enum ContentType { + Json = "application/json", + FormData = "multipart/form-data", + UrlEncoded = "application/x-www-form-urlencoded", +} + +export class HttpClient { + public instance: AxiosInstance; + private securityData: SecurityDataType | null = null; + private securityWorker?: ApiConfig["securityWorker"]; + private secure?: boolean; + private format?: ResponseType; + + constructor({ securityWorker, secure, format, ...axiosConfig }: ApiConfig = {}) { + this.instance = axios.create({ ...axiosConfig, baseURL: axiosConfig.baseURL || "" }); + this.secure = secure; + this.format = format; + this.securityWorker = securityWorker; + } + + public setSecurityData = (data: SecurityDataType | null) => { + this.securityData = data; + }; + + private mergeRequestParams(params1: AxiosRequestConfig, params2?: AxiosRequestConfig): AxiosRequestConfig { + return { + ...this.instance.defaults, + ...params1, + ...(params2 || {}), + headers: { + ...(this.instance.defaults.headers || {}), + ...(params1.headers || {}), + ...((params2 && params2.headers) || {}), + }, + }; + } + + private createFormData(input: Record): FormData { + return Object.keys(input || {}).reduce((formData, key) => { + const property = input[key]; + formData.append( + key, + property instanceof Blob + ? property + : typeof property === "object" && property !== null + ? 
JSON.stringify(property) + : `${property}`, + ); + return formData; + }, new FormData()); + } + + public request = async ({ + secure, + path, + type, + query, + format, + body, + ...params + }: FullRequestParams): Promise> => { + const secureParams = + ((typeof secure === "boolean" ? secure : this.secure) && + this.securityWorker && + (await this.securityWorker(this.securityData))) || + {}; + const requestParams = this.mergeRequestParams(params, secureParams); + const responseFormat = (format && this.format) || void 0; + + if (type === ContentType.FormData && body && body !== null && typeof body === "object") { + requestParams.headers.common = { Accept: "*/*" }; + requestParams.headers.post = {}; + requestParams.headers.put = {}; + + body = this.createFormData(body as Record); + } + + return this.instance.request({ + ...requestParams, + headers: { + ...(type && type !== ContentType.FormData ? { "Content-Type": type } : {}), + ...(requestParams.headers || {}), + }, + params: query, + responseType: responseFormat, + data: body, + url: path, + }); + }; +} + +/** + * @title zigbeealliance/distributedcomplianceledger/dclgenutil/genesis.proto + * @version version not set + */ +export class Api extends HttpClient {} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types.ts new file mode 100755 index 000000000..fbb084a7e --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types.ts @@ -0,0 +1,5 @@ + + +export { + + } \ No newline at end of file diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types/gogoproto/gogo.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types/gogoproto/gogo.ts new file mode 100644 index 000000000..3f41a047a --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types/gogoproto/gogo.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const 
protobufPackage = "gogoproto"; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types/google/api/annotations.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types/google/api/annotations.ts new file mode 100644 index 000000000..aace4787f --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types/google/api/annotations.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "google.api"; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types/google/api/http.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types/google/api/http.ts new file mode 100644 index 000000000..17e770d15 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types/google/api/http.ts @@ -0,0 +1,589 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.api"; + +/** + * Defines the HTTP configuration for an API service. It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface Http { + /** + * A list of HTTP configuration rules that apply to individual API methods. + * + * **NOTE:** All service configuration rules follow "last one wins" order. + */ + rules: HttpRule[]; + /** + * When set to true, URL path parmeters will be fully URI-decoded except in + * cases of single segment matches in reserved expansion, where "%2F" will be + * left encoded. + * + * The default behavior is to not decode RFC 6570 reserved characters in multi + * segment matches. + */ + fullyDecodeReservedExpansion: boolean; +} + +/** + * `HttpRule` defines the mapping of an RPC method to one or more HTTP + * REST API methods. The mapping specifies how different portions of the RPC + * request message are mapped to URL path, URL query parameters, and + * HTTP request body. 
The mapping is typically specified as an + * `google.api.http` annotation on the RPC method, + * see "google/api/annotations.proto" for details. + * + * The mapping consists of a field specifying the path template and + * method kind. The path template can refer to fields in the request + * message, as in the example below which describes a REST GET + * operation on a resource collection of messages: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}/{sub.subfield}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * SubMessage sub = 2; // `sub.subfield` is url-mapped + * } + * message Message { + * string text = 1; // content of the resource + * } + * + * The same http annotation can alternatively be expressed inside the + * `GRPC API Configuration` YAML file. + * + * http: + * rules: + * - selector: .Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * This definition enables an automatic, bidrectional mapping of HTTP + * JSON to RPC. Example: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456/foo` | `GetMessage(message_id: "123456" sub: SubMessage(subfield: "foo"))` + * + * In general, not only fields but also field paths can be referenced + * from a path pattern. Fields mapped to the path pattern cannot be + * repeated and must have a primitive (non-message) type. + * + * Any fields in the request message which are not bound by the path + * pattern automatically become (optional) HTTP query + * parameters. 
Assume the following definition of the request message: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * int64 revision = 2; // becomes a parameter + * SubMessage sub = 3; // `sub.subfield` becomes a parameter + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456?revision=2&sub.subfield=foo` | `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: "foo"))` + * + * Note that fields which are mapped to HTTP parameters must have a + * primitive type or a repeated primitive type. Message types are not + * allowed. In the case of a repeated type, the parameter can be + * repeated in the URL, as in `...?param=A¶m=B`. + * + * For HTTP method kinds which allow a request body, the `body` field + * specifies the mapping. Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. 
This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * The following HTTP JSON to RPC mapping is enabled: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice of + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC + * mappings: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` + * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: "123456")` + * + * # Rules for HTTP mapping + * + * The rules for mapping HTTP path, query parameters, and body fields + * to the request message are as follows: + * + * 1. The `body` field specifies either `*` or a field path, or is + * omitted. If omitted, it indicates there is no HTTP request body. + * 2. 
Leaf fields (recursive expansion of nested messages in the + * request) can be classified into three types: + * (a) Matched in the URL template. + * (b) Covered by body (if body is `*`, everything except (a) fields; + * else everything under the body field) + * (c) All other fields. + * 3. URL query parameters found in the HTTP request are mapped to (c) fields. + * 4. Any body sent with an HTTP request can contain only (b) fields. + * + * The syntax of the path template is as follows: + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single path segment. The syntax `**` matches zero + * or more path segments, which must be the last part of the path except the + * `Verb`. The syntax `LITERAL` matches literal text in the path. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path, all characters + * except `[-_.~0-9a-zA-Z]` are percent-encoded. Such variables show up in the + * Discovery Document as `{var}`. + * + * If a variable contains one or more path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path, all + * characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. Such variables + * show up in the Discovery Document as `{+var}`. 
+ * + * NOTE: While the single segment variable matches the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 + * Simple String Expansion, the multi segment variable **does not** match + * RFC 6570 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. + * + * NOTE: the field paths in variables and in the `body` must not refer to + * repeated fields or map fields. + */ +export interface HttpRule { + /** + * Selects methods to which this rule applies. + * + * Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + */ + selector: string; + /** Used for listing and getting information about resources. */ + get: + | string + | undefined; + /** Used for updating a resource. */ + put: + | string + | undefined; + /** Used for creating a resource. */ + post: + | string + | undefined; + /** Used for deleting a resource. */ + delete: + | string + | undefined; + /** Used for updating a resource. */ + patch: + | string + | undefined; + /** + * The custom pattern is used for specifying an HTTP method that is not + * included in the `pattern` field, such as HEAD, or "*" to leave the + * HTTP method unspecified for this rule. The wild-card rule is useful + * for services that provide content to Web (HTML) clients. + */ + custom: + | CustomHttpPattern + | undefined; + /** + * The name of the request field whose value is mapped to the HTTP body, or + * `*` for mapping all fields not captured by the path pattern to the HTTP + * body. NOTE: the referred field must not be a repeated field and must be + * present at the top-level of request message type. + */ + body: string; + /** + * Optional. The name of the response field whose value is mapped to the HTTP + * body of response. Other response fields are ignored. When + * not set, the response message will be used as HTTP body of response. 
+ */ + responseBody: string; + /** + * Additional HTTP bindings for the selector. Nested bindings must + * not contain an `additional_bindings` field themselves (that is, + * the nesting may only be one level deep). + */ + additionalBindings: HttpRule[]; +} + +/** A custom pattern is used for defining custom HTTP verb. */ +export interface CustomHttpPattern { + /** The name of this custom HTTP verb. */ + kind: string; + /** The path matched by this custom verb. */ + path: string; +} + +function createBaseHttp(): Http { + return { rules: [], fullyDecodeReservedExpansion: false }; +} + +export const Http = { + encode(message: Http, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rules) { + HttpRule.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.fullyDecodeReservedExpansion === true) { + writer.uint32(16).bool(message.fullyDecodeReservedExpansion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Http { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rules.push(HttpRule.decode(reader, reader.uint32())); + break; + case 2: + message.fullyDecodeReservedExpansion = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Http { + return { + rules: Array.isArray(object?.rules) ? object.rules.map((e: any) => HttpRule.fromJSON(e)) : [], + fullyDecodeReservedExpansion: isSet(object.fullyDecodeReservedExpansion) + ? Boolean(object.fullyDecodeReservedExpansion) + : false, + }; + }, + + toJSON(message: Http): unknown { + const obj: any = {}; + if (message.rules) { + obj.rules = message.rules.map((e) => e ? 
HttpRule.toJSON(e) : undefined); + } else { + obj.rules = []; + } + message.fullyDecodeReservedExpansion !== undefined + && (obj.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion); + return obj; + }, + + fromPartial, I>>(object: I): Http { + const message = createBaseHttp(); + message.rules = object.rules?.map((e) => HttpRule.fromPartial(e)) || []; + message.fullyDecodeReservedExpansion = object.fullyDecodeReservedExpansion ?? false; + return message; + }, +}; + +function createBaseHttpRule(): HttpRule { + return { + selector: "", + get: undefined, + put: undefined, + post: undefined, + delete: undefined, + patch: undefined, + custom: undefined, + body: "", + responseBody: "", + additionalBindings: [], + }; +} + +export const HttpRule = { + encode(message: HttpRule, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.selector !== "") { + writer.uint32(10).string(message.selector); + } + if (message.get !== undefined) { + writer.uint32(18).string(message.get); + } + if (message.put !== undefined) { + writer.uint32(26).string(message.put); + } + if (message.post !== undefined) { + writer.uint32(34).string(message.post); + } + if (message.delete !== undefined) { + writer.uint32(42).string(message.delete); + } + if (message.patch !== undefined) { + writer.uint32(50).string(message.patch); + } + if (message.custom !== undefined) { + CustomHttpPattern.encode(message.custom, writer.uint32(66).fork()).ldelim(); + } + if (message.body !== "") { + writer.uint32(58).string(message.body); + } + if (message.responseBody !== "") { + writer.uint32(98).string(message.responseBody); + } + for (const v of message.additionalBindings) { + HttpRule.encode(v!, writer.uint32(90).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): HttpRule { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseHttpRule(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.selector = reader.string(); + break; + case 2: + message.get = reader.string(); + break; + case 3: + message.put = reader.string(); + break; + case 4: + message.post = reader.string(); + break; + case 5: + message.delete = reader.string(); + break; + case 6: + message.patch = reader.string(); + break; + case 8: + message.custom = CustomHttpPattern.decode(reader, reader.uint32()); + break; + case 7: + message.body = reader.string(); + break; + case 12: + message.responseBody = reader.string(); + break; + case 11: + message.additionalBindings.push(HttpRule.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): HttpRule { + return { + selector: isSet(object.selector) ? String(object.selector) : "", + get: isSet(object.get) ? String(object.get) : undefined, + put: isSet(object.put) ? String(object.put) : undefined, + post: isSet(object.post) ? String(object.post) : undefined, + delete: isSet(object.delete) ? String(object.delete) : undefined, + patch: isSet(object.patch) ? String(object.patch) : undefined, + custom: isSet(object.custom) ? CustomHttpPattern.fromJSON(object.custom) : undefined, + body: isSet(object.body) ? String(object.body) : "", + responseBody: isSet(object.responseBody) ? String(object.responseBody) : "", + additionalBindings: Array.isArray(object?.additionalBindings) + ? 
object.additionalBindings.map((e: any) => HttpRule.fromJSON(e)) + : [], + }; + }, + + toJSON(message: HttpRule): unknown { + const obj: any = {}; + message.selector !== undefined && (obj.selector = message.selector); + message.get !== undefined && (obj.get = message.get); + message.put !== undefined && (obj.put = message.put); + message.post !== undefined && (obj.post = message.post); + message.delete !== undefined && (obj.delete = message.delete); + message.patch !== undefined && (obj.patch = message.patch); + message.custom !== undefined + && (obj.custom = message.custom ? CustomHttpPattern.toJSON(message.custom) : undefined); + message.body !== undefined && (obj.body = message.body); + message.responseBody !== undefined && (obj.responseBody = message.responseBody); + if (message.additionalBindings) { + obj.additionalBindings = message.additionalBindings.map((e) => e ? HttpRule.toJSON(e) : undefined); + } else { + obj.additionalBindings = []; + } + return obj; + }, + + fromPartial, I>>(object: I): HttpRule { + const message = createBaseHttpRule(); + message.selector = object.selector ?? ""; + message.get = object.get ?? undefined; + message.put = object.put ?? undefined; + message.post = object.post ?? undefined; + message.delete = object.delete ?? undefined; + message.patch = object.patch ?? undefined; + message.custom = (object.custom !== undefined && object.custom !== null) + ? CustomHttpPattern.fromPartial(object.custom) + : undefined; + message.body = object.body ?? ""; + message.responseBody = object.responseBody ?? 
""; + message.additionalBindings = object.additionalBindings?.map((e) => HttpRule.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseCustomHttpPattern(): CustomHttpPattern { + return { kind: "", path: "" }; +} + +export const CustomHttpPattern = { + encode(message: CustomHttpPattern, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.kind !== "") { + writer.uint32(10).string(message.kind); + } + if (message.path !== "") { + writer.uint32(18).string(message.path); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CustomHttpPattern { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCustomHttpPattern(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.kind = reader.string(); + break; + case 2: + message.path = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): CustomHttpPattern { + return { kind: isSet(object.kind) ? String(object.kind) : "", path: isSet(object.path) ? String(object.path) : "" }; + }, + + toJSON(message: CustomHttpPattern): unknown { + const obj: any = {}; + message.kind !== undefined && (obj.kind = message.kind); + message.path !== undefined && (obj.path = message.path); + return obj; + }, + + fromPartial, I>>(object: I): CustomHttpPattern { + const message = createBaseCustomHttpPattern(); + message.kind = object.kind ?? ""; + message.path = object.path ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? 
keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types/google/protobuf/descriptor.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types/google/protobuf/descriptor.ts new file mode 100644 index 000000000..8fb7bb24c --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types/google/protobuf/descriptor.ts @@ -0,0 +1,3753 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSet { + file: FileDescriptorProto[]; +} + +/** Describes a complete .proto file. */ +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name: string; + /** e.g. "foo", "foo.bar", etc. */ + package: string; + /** Names of files imported by this file. */ + dependency: string[]; + /** Indexes of the public imported files in the dependency list above. */ + publicDependency: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weakDependency: number[]; + /** All top-level definitions in this file. */ + messageType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + service: ServiceDescriptorProto[]; + extension: FieldDescriptorProto[]; + options: + | FileOptions + | undefined; + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. 
+ */ + sourceCodeInfo: + | SourceCodeInfo + | undefined; + /** + * The syntax of the proto file. + * The supported values are "proto2" and "proto3". + */ + syntax: string; +} + +/** Describes a message type. */ +export interface DescriptorProto { + name: string; + field: FieldDescriptorProto[]; + extension: FieldDescriptorProto[]; + nestedType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + extensionRange: DescriptorProto_ExtensionRange[]; + oneofDecl: OneofDescriptorProto[]; + options: MessageOptions | undefined; + reservedRange: DescriptorProto_ReservedRange[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reservedName: string[]; +} + +export interface DescriptorProto_ExtensionRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; + options: ExtensionRangeOptions | undefined; +} + +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; +} + +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Describes a field within a message. */ +export interface FieldDescriptorProto { + name: string; + number: number; + label: FieldDescriptorProto_Label; + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type: FieldDescriptorProto_Type; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. 
first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). + */ + typeName: string; + /** + * For extensions, this is the name of the type being extended. It is + * resolved in the same manner as type_name. + */ + extendee: string; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + * TODO(kenton): Base-64 encode? + */ + defaultValue: string; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneofIndex: number; + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + jsonName: string; + options: + | FieldOptions + | undefined; + /** + * If true, this is a proto3 "optional". When a proto3 field is optional, it + * tracks presence regardless of field type. + * + * When proto3_optional is true, this field must be belong to a oneof to + * signal to old proto3 clients that presence is tracked for this field. This + * oneof is known as a "synthetic" oneof, and this field must be its sole + * member (each proto3 optional field gets its own synthetic oneof). Synthetic + * oneofs exist in the descriptor only, and do not generate any API. Synthetic + * oneofs must be ordered after all "real" oneofs. + * + * For message fields, proto3_optional doesn't create any semantic change, + * since non-repeated message fields always track presence. However it still + * indicates the semantic detail of whether the user wrote "optional" or not. + * This can be useful for round-tripping the .proto file. 
For consistency we + * give message fields a synthetic oneof also, even though it is not required + * to track presence. This is especially important because the parser can't + * tell if a field is a message or an enum, so it must always create a + * synthetic oneof. + * + * Proto2 optional fields do not set this flag, because they already indicate + * optional with `LABEL_OPTIONAL`. + */ + proto3Optional: boolean; +} + +export enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + TYPE_GROUP = 10, + /** TYPE_MESSAGE - Length-delimited aggregate. */ + TYPE_MESSAGE = 11, + /** TYPE_BYTES - New in version 2. */ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + /** TYPE_SINT64 - Uses ZigZag encoding. 
*/ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + return FieldDescriptorProto_Type.TYPE_SINT64; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Type.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case 
FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + case FieldDescriptorProto_Type.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Label.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string { + switch (object) { + case 
FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + case FieldDescriptorProto_Label.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** Describes a oneof. */ +export interface OneofDescriptorProto { + name: string; + options: OneofOptions | undefined; +} + +/** Describes an enum type. */ +export interface EnumDescriptorProto { + name: string; + value: EnumValueDescriptorProto[]; + options: + | EnumOptions + | undefined; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reservedRange: EnumDescriptorProto_EnumReservedRange[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reservedName: string[]; +} + +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start: number; + /** Inclusive. */ + end: number; +} + +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProto { + name: string; + number: number; + options: EnumValueOptions | undefined; +} + +/** Describes a service. */ +export interface ServiceDescriptorProto { + name: string; + method: MethodDescriptorProto[]; + options: ServiceOptions | undefined; +} + +/** Describes a method of a service. */ +export interface MethodDescriptorProto { + name: string; + /** + * Input and output type names. 
These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + inputType: string; + outputType: string; + options: + | MethodOptions + | undefined; + /** Identifies if client streams multiple client messages */ + clientStreaming: boolean; + /** Identifies if server streams multiple server messages */ + serverStreaming: boolean; +} + +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage: string; + /** + * Controls the name of the wrapper Java class generated for the .proto file. + * That class will always contain the .proto file's getDescriptor() method as + * well as any top-level extensions defined in the .proto file. + * If java_multiple_files is disabled, then all the other classes from the + * .proto file will be nested inside the single wrapper outer class. + */ + javaOuterClassname: string; + /** + * If enabled, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the wrapper class + * named by java_outer_classname. However, the wrapper class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + javaMultipleFiles: boolean; + /** + * This option does nothing. + * + * @deprecated + */ + javaGenerateEqualsAndHash: boolean; + /** + * If set true, then the Java2 code generator will generate code that + * throws an exception whenever an attempt is made to assign a non-UTF-8 + * byte sequence to a string field. + * Message reflection will do the same. + * However, an extension field still accepts non-UTF-8 byte sequences. 
+ * This option has no effect on when used with the lite runtime. + */ + javaStringCheckUtf8: boolean; + optimizeFor: FileOptions_OptimizeMode; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + goPackage: string; + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + ccGenericServices: boolean; + javaGenericServices: boolean; + pyGenericServices: boolean; + phpGenericServices: boolean; + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated: boolean; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + ccEnableArenas: boolean; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. + */ + objcClassPrefix: string; + /** Namespace for generated classes; defaults to the package. 
*/ + csharpNamespace: string; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swiftPrefix: string; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + phpClassPrefix: string; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. When this option is empty, the package name will be used for + * determining the namespace. + */ + phpNamespace: string; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + phpMetadataNamespace: string; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + rubyPackage: string; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Generated classes can be optimized for speed or code size. */ +export enum FileOptions_OptimizeMode { + /** SPEED - Generate complete code for parsing, serialization, */ + SPEED = 1, + /** CODE_SIZE - etc. */ + CODE_SIZE = 2, + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. 
*/ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1, +} + +export function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + case -1: + case "UNRECOGNIZED": + default: + return FileOptions_OptimizeMode.UNRECOGNIZED; + } +} + +export function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + case FileOptions_OptimizeMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + messageSetWireFormat: boolean; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". 
+ */ + noStandardDescriptorAccessor: boolean; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated: boolean; + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + mapEntry: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface FieldOptions { + /** + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is not yet implemented in the open source + * release -- sorry, we'll try to include it in a future version! + */ + ctype: FieldOptions_CType; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. 
+ */ + packed: boolean; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. + * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype: FieldOptions_JSType; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * Note that implementations may choose not to check required fields within + * a lazy sub-message. That is, calling IsInitialized() on the outer message + * may return true even if the inner message has missing required fields. 
+ * This is necessary because otherwise the inner message would have to be + * parsed in order to perform the check, defeating the purpose of lazy + * parsing. An implementation which chooses not to check required fields + * must be consistent about it. That is, for any particular sub-message, the + * implementation must either *always* check its required fields, or *never* + * check its required fields, regardless of whether or not the message has + * been parsed. + */ + lazy: boolean; + /** + * Is this field deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated: boolean; + /** For Google-internal migration only. Do not use. */ + weak: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export enum FieldOptions_CType { + /** STRING - Default mode. */ + STRING = 0, + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_CType.UNRECOGNIZED; + } +} + +export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + case FieldOptions_CType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. */ + JS_NORMAL = 0, + /** JS_STRING - Use JavaScript strings. 
*/ + JS_STRING = 1, + /** JS_NUMBER - Use JavaScript numbers. */ + JS_NUMBER = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_JSType.UNRECOGNIZED; + } +} + +export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + case FieldOptions_JSType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface OneofOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias: boolean; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumValueOptions { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. 
*/ + uninterpretedOption: UninterpretedOption[]; +} + +export interface ServiceOptions { + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated: boolean; + idempotencyLevel: MethodOptions_IdempotencyLevel; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
+ */ +export enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1, +} + +export function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + case -1: + case "UNRECOGNIZED": + default: + return MethodOptions_IdempotencyLevel.UNRECOGNIZED; + } +} + +export function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + case MethodOptions_IdempotencyLevel.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. 
+ */ + identifierValue: string; + positiveIntValue: number; + negativeIntValue: number; + doubleValue: number; + stringValue: Uint8Array; + aggregateValue: string; +} + +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePart { + namePart: string; + isExtension: boolean; +} + +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. 
This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[]; +} + +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition. For + * example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). 
+ */ + path: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. + * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to qux. + * // + * // Another line attached to qux. + * optional double qux = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to qux or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. * / + * /* Block comment attached to + * * grault. 
* / + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leadingComments: string; + trailingComments: string; + leadingDetachedComments: string[]; +} + +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. + */ + annotation: GeneratedCodeInfo_Annotation[]; +} + +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[]; + /** Identifies the filesystem path to the original source .proto. */ + sourceFile: string; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin: number; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified offset. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end: number; +} + +function createBaseFileDescriptorSet(): FileDescriptorSet { + return { file: [] }; +} + +export const FileDescriptorSet = { + encode(message: FileDescriptorSet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.file) { + FileDescriptorProto.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorSet { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.file.push(FileDescriptorProto.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorSet { + return { file: Array.isArray(object?.file) ? object.file.map((e: any) => FileDescriptorProto.fromJSON(e)) : [] }; + }, + + toJSON(message: FileDescriptorSet): unknown { + const obj: any = {}; + if (message.file) { + obj.file = message.file.map((e) => e ? FileDescriptorProto.toJSON(e) : undefined); + } else { + obj.file = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorSet { + const message = createBaseFileDescriptorSet(); + message.file = object.file?.map((e) => FileDescriptorProto.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFileDescriptorProto(): FileDescriptorProto { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + }; +} + +export const FileDescriptorProto = { + encode(message: FileDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.package !== "") { + writer.uint32(18).string(message.package); + } + for (const v of message.dependency) { + writer.uint32(26).string(v!); + } + writer.uint32(82).fork(); + for (const v of message.publicDependency) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(90).fork(); + for (const v of message.weakDependency) { + writer.int32(v); + } + writer.ldelim(); + for (const v of message.messageType) { + DescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of 
message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const v of message.service) { + ServiceDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(58).fork()).ldelim(); + } + if (message.options !== undefined) { + FileOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.sourceCodeInfo !== undefined) { + SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(74).fork()).ldelim(); + } + if (message.syntax !== "") { + writer.uint32(98).string(message.syntax); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFileDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.package = reader.string(); + break; + case 3: + message.dependency.push(reader.string()); + break; + case 10: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.publicDependency.push(reader.int32()); + } + } else { + message.publicDependency.push(reader.int32()); + } + break; + case 11: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.weakDependency.push(reader.int32()); + } + } else { + message.weakDependency.push(reader.int32()); + } + break; + case 4: + message.messageType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.service.push(ServiceDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + 
message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 8: + message.options = FileOptions.decode(reader, reader.uint32()); + break; + case 9: + message.sourceCodeInfo = SourceCodeInfo.decode(reader, reader.uint32()); + break; + case 12: + message.syntax = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + package: isSet(object.package) ? String(object.package) : "", + dependency: Array.isArray(object?.dependency) ? object.dependency.map((e: any) => String(e)) : [], + publicDependency: Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e: any) => Number(e)) + : [], + weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e: any) => Number(e)) : [], + messageType: Array.isArray(object?.messageType) + ? object.messageType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + service: Array.isArray(object?.service) ? object.service.map((e: any) => ServiceDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) ? SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, + syntax: isSet(object.syntax) ? 
String(object.syntax) : "", + }; + }, + + toJSON(message: FileDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.package !== undefined && (obj.package = message.package); + if (message.dependency) { + obj.dependency = message.dependency.map((e) => e); + } else { + obj.dependency = []; + } + if (message.publicDependency) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } else { + obj.publicDependency = []; + } + if (message.weakDependency) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } else { + obj.weakDependency = []; + } + if (message.messageType) { + obj.messageType = message.messageType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.messageType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.service) { + obj.service = message.service.map((e) => e ? ServiceDescriptorProto.toJSON(e) : undefined); + } else { + obj.service = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + message.options !== undefined && (obj.options = message.options ? FileOptions.toJSON(message.options) : undefined); + message.sourceCodeInfo !== undefined + && (obj.sourceCodeInfo = message.sourceCodeInfo ? SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined); + message.syntax !== undefined && (obj.syntax = message.syntax); + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorProto { + const message = createBaseFileDescriptorProto(); + message.name = object.name ?? ""; + message.package = object.package ?? 
""; + message.dependency = object.dependency?.map((e) => e) || []; + message.publicDependency = object.publicDependency?.map((e) => e) || []; + message.weakDependency = object.weakDependency?.map((e) => e) || []; + message.messageType = object.messageType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.service = object.service?.map((e) => ServiceDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? FileOptions.fromPartial(object.options) + : undefined; + message.sourceCodeInfo = (object.sourceCodeInfo !== undefined && object.sourceCodeInfo !== null) + ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) + : undefined; + message.syntax = object.syntax ?? ""; + return message; + }, +}; + +function createBaseDescriptorProto(): DescriptorProto { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + }; +} + +export const DescriptorProto = { + encode(message: DescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.field) { + FieldDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.nestedType) { + DescriptorProto.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.extensionRange) { + DescriptorProto_ExtensionRange.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const 
v of message.oneofDecl) { + OneofDescriptorProto.encode(v!, writer.uint32(66).fork()).ldelim(); + } + if (message.options !== undefined) { + MessageOptions.encode(message.options, writer.uint32(58).fork()).ldelim(); + } + for (const v of message.reservedRange) { + DescriptorProto_ReservedRange.encode(v!, writer.uint32(74).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(82).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.field.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.nestedType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 4: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.extensionRange.push(DescriptorProto_ExtensionRange.decode(reader, reader.uint32())); + break; + case 8: + message.oneofDecl.push(OneofDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + message.options = MessageOptions.decode(reader, reader.uint32()); + break; + case 9: + message.reservedRange.push(DescriptorProto_ReservedRange.decode(reader, reader.uint32())); + break; + case 10: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + field: Array.isArray(object?.field) ? 
object.field.map((e: any) => FieldDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: Array.isArray(object?.nestedType) + ? object.nestedType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + extensionRange: Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e: any) => DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e: any) => OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? MessageOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: DescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.field) { + obj.field = message.field.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.field = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + if (message.nestedType) { + obj.nestedType = message.nestedType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.nestedType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.extensionRange) { + obj.extensionRange = message.extensionRange.map((e) => e ? 
DescriptorProto_ExtensionRange.toJSON(e) : undefined); + } else { + obj.extensionRange = []; + } + if (message.oneofDecl) { + obj.oneofDecl = message.oneofDecl.map((e) => e ? OneofDescriptorProto.toJSON(e) : undefined); + } else { + obj.oneofDecl = []; + } + message.options !== undefined + && (obj.options = message.options ? MessageOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => e ? DescriptorProto_ReservedRange.toJSON(e) : undefined); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): DescriptorProto { + const message = createBaseDescriptorProto(); + message.name = object.name ?? ""; + message.field = object.field?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.nestedType = object.nestedType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.extensionRange = object.extensionRange?.map((e) => DescriptorProto_ExtensionRange.fromPartial(e)) || []; + message.oneofDecl = object.oneofDecl?.map((e) => OneofDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
MessageOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => DescriptorProto_ReservedRange.fromPartial(e)) || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseDescriptorProto_ExtensionRange(): DescriptorProto_ExtensionRange { + return { start: 0, end: 0, options: undefined }; +} + +export const DescriptorProto_ExtensionRange = { + encode(message: DescriptorProto_ExtensionRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + if (message.options !== undefined) { + ExtensionRangeOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ExtensionRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ExtensionRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + case 3: + message.options = ExtensionRangeOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ExtensionRange { + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + options: isSet(object.options) ? 
ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: DescriptorProto_ExtensionRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + message.options !== undefined + && (obj.options = message.options ? ExtensionRangeOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ExtensionRange { + const message = createBaseDescriptorProto_ExtensionRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? ExtensionRangeOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseDescriptorProto_ReservedRange(): DescriptorProto_ReservedRange { + return { start: 0, end: 0 }; +} + +export const DescriptorProto_ReservedRange = { + encode(message: DescriptorProto_ReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? 
Number(object.end) : 0 }; + }, + + toJSON(message: DescriptorProto_ReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ReservedRange { + const message = createBaseDescriptorProto_ReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + return message; + }, +}; + +function createBaseExtensionRangeOptions(): ExtensionRangeOptions { + return { uninterpretedOption: [] }; +} + +export const ExtensionRangeOptions = { + encode(message: ExtensionRangeOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ExtensionRangeOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ExtensionRangeOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} + +export const FieldDescriptorProto = { + encode(message: FieldDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(24).int32(message.number); + } + if (message.label !== 1) { + writer.uint32(32).int32(message.label); + } + if (message.type !== 1) { + writer.uint32(40).int32(message.type); + } + if (message.typeName !== "") { + writer.uint32(50).string(message.typeName); + } + if (message.extendee !== "") { + writer.uint32(18).string(message.extendee); + } + if (message.defaultValue !== "") { + writer.uint32(58).string(message.defaultValue); + } + if (message.oneofIndex !== 0) { + writer.uint32(72).int32(message.oneofIndex); + } + if (message.jsonName !== "") { + writer.uint32(82).string(message.jsonName); + } + if (message.options !== undefined) { + FieldOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.proto3Optional === true) { + writer.uint32(136).bool(message.proto3Optional); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 3: + message.number = reader.int32(); + break; + case 4: + message.label = reader.int32() as any; + break; + case 5: + message.type = reader.int32() as any; + break; + case 6: + message.typeName = reader.string(); + break; + case 2: + message.extendee = reader.string(); + break; + case 7: + message.defaultValue = reader.string(); + break; + case 9: + message.oneofIndex = reader.int32(); + break; + case 10: + message.jsonName = reader.string(); + break; + case 8: + message.options = FieldOptions.decode(reader, reader.uint32()); + break; + case 17: + message.proto3Optional = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) ? String(object.typeName) : "", + extendee: isSet(object.extendee) ? String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "", + oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? String(object.jsonName) : "", + options: isSet(object.options) ? FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) ? 
Boolean(object.proto3Optional) : false, + }; + }, + + toJSON(message: FieldDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label)); + message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type)); + message.typeName !== undefined && (obj.typeName = message.typeName); + message.extendee !== undefined && (obj.extendee = message.extendee); + message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue); + message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex)); + message.jsonName !== undefined && (obj.jsonName = message.jsonName); + message.options !== undefined && (obj.options = message.options ? FieldOptions.toJSON(message.options) : undefined); + message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional); + return obj; + }, + + fromPartial, I>>(object: I): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.label = object.label ?? 1; + message.type = object.type ?? 1; + message.typeName = object.typeName ?? ""; + message.extendee = object.extendee ?? ""; + message.defaultValue = object.defaultValue ?? ""; + message.oneofIndex = object.oneofIndex ?? 0; + message.jsonName = object.jsonName ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? FieldOptions.fromPartial(object.options) + : undefined; + message.proto3Optional = object.proto3Optional ?? 
false; + return message; + }, +}; + +function createBaseOneofDescriptorProto(): OneofDescriptorProto { + return { name: "", options: undefined }; +} + +export const OneofDescriptorProto = { + encode(message: OneofDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.options !== undefined) { + OneofOptions.encode(message.options, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.options = OneofOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + options: isSet(object.options) ? OneofOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: OneofDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.options !== undefined && (obj.options = message.options ? OneofOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto(); + message.name = object.name ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? 
OneofOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseEnumDescriptorProto(): EnumDescriptorProto { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] }; +} + +export const EnumDescriptorProto = { + encode(message: EnumDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.value) { + EnumValueDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + EnumOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.reservedRange) { + EnumDescriptorProto_EnumReservedRange.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(42).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.value.push(EnumValueDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = EnumOptions.decode(reader, reader.uint32()); + break; + case 4: + message.reservedRange.push(EnumDescriptorProto_EnumReservedRange.decode(reader, reader.uint32())); + break; + case 5: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + value: Array.isArray(object?.value) ? 
object.value.map((e: any) => EnumValueDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? EnumOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: EnumDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.value) { + obj.value = message.value.map((e) => e ? EnumValueDescriptorProto.toJSON(e) : undefined); + } else { + obj.value = []; + } + message.options !== undefined && (obj.options = message.options ? EnumOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => + e ? EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined + ); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto(); + message.name = object.name ?? ""; + message.value = object.value?.map((e) => EnumValueDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
EnumOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => EnumDescriptorProto_EnumReservedRange.fromPartial(e)) + || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseEnumDescriptorProto_EnumReservedRange(): EnumDescriptorProto_EnumReservedRange { + return { start: 0, end: 0 }; +} + +export const EnumDescriptorProto_EnumReservedRange = { + encode(message: EnumDescriptorProto_EnumReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto_EnumReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto_EnumReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + + toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +function createBaseEnumValueDescriptorProto(): EnumValueDescriptorProto { + return { name: "", number: 0, options: undefined }; +} + +export const EnumValueDescriptorProto = { + encode(message: EnumValueDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(16).int32(message.number); + } + if (message.options !== undefined) { + EnumValueOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.number = reader.int32(); + break; + case 3: + message.options = EnumValueOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + options: isSet(object.options) ? EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: EnumValueDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.options !== undefined + && (obj.options = message.options ? 
EnumValueOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? EnumValueOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseServiceDescriptorProto(): ServiceDescriptorProto { + return { name: "", method: [], options: undefined }; +} + +export const ServiceDescriptorProto = { + encode(message: ServiceDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.method) { + MethodDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + ServiceOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.method.push(MethodDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = ServiceOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + method: Array.isArray(object?.method) ? object.method.map((e: any) => MethodDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? 
ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: ServiceDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.method) { + obj.method = message.method.map((e) => e ? MethodDescriptorProto.toJSON(e) : undefined); + } else { + obj.method = []; + } + message.options !== undefined + && (obj.options = message.options ? ServiceOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto(); + message.name = object.name ?? ""; + message.method = object.method?.map((e) => MethodDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? ServiceOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseMethodDescriptorProto(): MethodDescriptorProto { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} + +export const MethodDescriptorProto = { + encode(message: MethodDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.inputType !== "") { + writer.uint32(18).string(message.inputType); + } + if (message.outputType !== "") { + writer.uint32(26).string(message.outputType); + } + if (message.options !== undefined) { + MethodOptions.encode(message.options, writer.uint32(34).fork()).ldelim(); + } + if (message.clientStreaming === true) { + writer.uint32(40).bool(message.clientStreaming); + } + if (message.serverStreaming === true) { + writer.uint32(48).bool(message.serverStreaming); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodDescriptorProto { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.inputType = reader.string(); + break; + case 3: + message.outputType = reader.string(); + break; + case 4: + message.options = MethodOptions.decode(reader, reader.uint32()); + break; + case 5: + message.clientStreaming = reader.bool(); + break; + case 6: + message.serverStreaming = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + inputType: isSet(object.inputType) ? String(object.inputType) : "", + outputType: isSet(object.outputType) ? String(object.outputType) : "", + options: isSet(object.options) ? MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false, + serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false, + }; + }, + + toJSON(message: MethodDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.inputType !== undefined && (obj.inputType = message.inputType); + message.outputType !== undefined && (obj.outputType = message.outputType); + message.options !== undefined + && (obj.options = message.options ? MethodOptions.toJSON(message.options) : undefined); + message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming); + message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming); + return obj; + }, + + fromPartial, I>>(object: I): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto(); + message.name = object.name ?? 
""; + message.inputType = object.inputType ?? ""; + message.outputType = object.outputType ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? MethodOptions.fromPartial(object.options) + : undefined; + message.clientStreaming = object.clientStreaming ?? false; + message.serverStreaming = object.serverStreaming ?? false; + return message; + }, +}; + +function createBaseFileOptions(): FileOptions { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + uninterpretedOption: [], + }; +} + +export const FileOptions = { + encode(message: FileOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.javaPackage !== "") { + writer.uint32(10).string(message.javaPackage); + } + if (message.javaOuterClassname !== "") { + writer.uint32(66).string(message.javaOuterClassname); + } + if (message.javaMultipleFiles === true) { + writer.uint32(80).bool(message.javaMultipleFiles); + } + if (message.javaGenerateEqualsAndHash === true) { + writer.uint32(160).bool(message.javaGenerateEqualsAndHash); + } + if (message.javaStringCheckUtf8 === true) { + writer.uint32(216).bool(message.javaStringCheckUtf8); + } + if (message.optimizeFor !== 1) { + writer.uint32(72).int32(message.optimizeFor); + } + if (message.goPackage !== "") { + writer.uint32(90).string(message.goPackage); + } + if (message.ccGenericServices === true) { + writer.uint32(128).bool(message.ccGenericServices); + } + if (message.javaGenericServices === true) { + writer.uint32(136).bool(message.javaGenericServices); + } + if 
(message.pyGenericServices === true) { + writer.uint32(144).bool(message.pyGenericServices); + } + if (message.phpGenericServices === true) { + writer.uint32(336).bool(message.phpGenericServices); + } + if (message.deprecated === true) { + writer.uint32(184).bool(message.deprecated); + } + if (message.ccEnableArenas === true) { + writer.uint32(248).bool(message.ccEnableArenas); + } + if (message.objcClassPrefix !== "") { + writer.uint32(290).string(message.objcClassPrefix); + } + if (message.csharpNamespace !== "") { + writer.uint32(298).string(message.csharpNamespace); + } + if (message.swiftPrefix !== "") { + writer.uint32(314).string(message.swiftPrefix); + } + if (message.phpClassPrefix !== "") { + writer.uint32(322).string(message.phpClassPrefix); + } + if (message.phpNamespace !== "") { + writer.uint32(330).string(message.phpNamespace); + } + if (message.phpMetadataNamespace !== "") { + writer.uint32(354).string(message.phpMetadataNamespace); + } + if (message.rubyPackage !== "") { + writer.uint32(362).string(message.rubyPackage); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.javaPackage = reader.string(); + break; + case 8: + message.javaOuterClassname = reader.string(); + break; + case 10: + message.javaMultipleFiles = reader.bool(); + break; + case 20: + message.javaGenerateEqualsAndHash = reader.bool(); + break; + case 27: + message.javaStringCheckUtf8 = reader.bool(); + break; + case 9: + message.optimizeFor = reader.int32() as any; + break; + case 11: + message.goPackage = reader.string(); + break; + case 16: + message.ccGenericServices = reader.bool(); + break; + case 17: + message.javaGenericServices = reader.bool(); + break; + case 18: + message.pyGenericServices = reader.bool(); + break; + case 42: + message.phpGenericServices = reader.bool(); + break; + case 23: + message.deprecated = reader.bool(); + break; + case 31: + message.ccEnableArenas = reader.bool(); + break; + case 36: + message.objcClassPrefix = reader.string(); + break; + case 37: + message.csharpNamespace = reader.string(); + break; + case 39: + message.swiftPrefix = reader.string(); + break; + case 40: + message.phpClassPrefix = reader.string(); + break; + case 41: + message.phpNamespace = reader.string(); + break; + case 44: + message.phpMetadataNamespace = reader.string(); + break; + case 45: + message.rubyPackage = reader.string(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileOptions { + return { + javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "", + javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) ? 
Boolean(object.javaMultipleFiles) : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false, + optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, + goPackage: isSet(object.goPackage) ? String(object.goPackage) : "", + ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false, + javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false, + pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false, + phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false, + objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "", + csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "", + swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "", + phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "", + phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "", + rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "", + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FileOptions): unknown { + const obj: any = {}; + message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage); + message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname); + message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles); + message.javaGenerateEqualsAndHash !== undefined + && (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash); + message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8); + message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor)); + message.goPackage !== undefined && (obj.goPackage = message.goPackage); + message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices); + message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices); + message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices); + message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas); + message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix); + message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace); + message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix); + message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix); + message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace); + message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace); + message.rubyPackage !== undefined 
&& (obj.rubyPackage = message.rubyPackage); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileOptions { + const message = createBaseFileOptions(); + message.javaPackage = object.javaPackage ?? ""; + message.javaOuterClassname = object.javaOuterClassname ?? ""; + message.javaMultipleFiles = object.javaMultipleFiles ?? false; + message.javaGenerateEqualsAndHash = object.javaGenerateEqualsAndHash ?? false; + message.javaStringCheckUtf8 = object.javaStringCheckUtf8 ?? false; + message.optimizeFor = object.optimizeFor ?? 1; + message.goPackage = object.goPackage ?? ""; + message.ccGenericServices = object.ccGenericServices ?? false; + message.javaGenericServices = object.javaGenericServices ?? false; + message.pyGenericServices = object.pyGenericServices ?? false; + message.phpGenericServices = object.phpGenericServices ?? false; + message.deprecated = object.deprecated ?? false; + message.ccEnableArenas = object.ccEnableArenas ?? false; + message.objcClassPrefix = object.objcClassPrefix ?? ""; + message.csharpNamespace = object.csharpNamespace ?? ""; + message.swiftPrefix = object.swiftPrefix ?? ""; + message.phpClassPrefix = object.phpClassPrefix ?? ""; + message.phpNamespace = object.phpNamespace ?? ""; + message.phpMetadataNamespace = object.phpMetadataNamespace ?? ""; + message.rubyPackage = object.rubyPackage ?? 
""; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMessageOptions(): MessageOptions { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + uninterpretedOption: [], + }; +} + +export const MessageOptions = { + encode(message: MessageOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.messageSetWireFormat === true) { + writer.uint32(8).bool(message.messageSetWireFormat); + } + if (message.noStandardDescriptorAccessor === true) { + writer.uint32(16).bool(message.noStandardDescriptorAccessor); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.mapEntry === true) { + writer.uint32(56).bool(message.mapEntry); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MessageOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMessageOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.messageSetWireFormat = reader.bool(); + break; + case 2: + message.noStandardDescriptorAccessor = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 7: + message.mapEntry = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MessageOptions { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) ? 
Boolean(object.messageSetWireFormat) : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MessageOptions): unknown { + const obj: any = {}; + message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat); + message.noStandardDescriptorAccessor !== undefined + && (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MessageOptions { + const message = createBaseMessageOptions(); + message.messageSetWireFormat = object.messageSetWireFormat ?? false; + message.noStandardDescriptorAccessor = object.noStandardDescriptorAccessor ?? false; + message.deprecated = object.deprecated ?? false; + message.mapEntry = object.mapEntry ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldOptions(): FieldOptions { + return { ctype: 0, packed: false, jstype: 0, lazy: false, deprecated: false, weak: false, uninterpretedOption: [] }; +} + +export const FieldOptions = { + encode(message: FieldOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ctype !== 0) { + writer.uint32(8).int32(message.ctype); + } + if (message.packed === true) { + writer.uint32(16).bool(message.packed); + } + if (message.jstype !== 0) { + writer.uint32(48).int32(message.jstype); + } + if (message.lazy === true) { + writer.uint32(40).bool(message.lazy); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.weak === true) { + writer.uint32(80).bool(message.weak); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.ctype = reader.int32() as any; + break; + case 2: + message.packed = reader.bool(); + break; + case 6: + message.jstype = reader.int32() as any; + break; + case 5: + message.lazy = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 10: + message.weak = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldOptions { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? Boolean(object.lazy) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? Boolean(object.weak) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FieldOptions): unknown { + const obj: any = {}; + message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype)); + message.packed !== undefined && (obj.packed = message.packed); + message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype)); + message.lazy !== undefined && (obj.lazy = message.lazy); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.weak !== undefined && (obj.weak = message.weak); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FieldOptions { + const message = createBaseFieldOptions(); + message.ctype = object.ctype ?? 0; + message.packed = object.packed ?? false; + message.jstype = object.jstype ?? 0; + message.lazy = object.lazy ?? false; + message.deprecated = object.deprecated ?? false; + message.weak = object.weak ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseOneofOptions(): OneofOptions { + return { uninterpretedOption: [] }; +} + +export const OneofOptions = { + encode(message: OneofOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: OneofOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): OneofOptions { + const message = createBaseOneofOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumOptions(): EnumOptions { + return { allowAlias: false, deprecated: false, uninterpretedOption: [] }; +} + +export const EnumOptions = { + encode(message: EnumOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.allowAlias === true) { + writer.uint32(16).bool(message.allowAlias); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.allowAlias = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumOptions { + return { + allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumOptions): unknown { + const obj: any = {}; + message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumOptions { + const message = createBaseEnumOptions(); + message.allowAlias = object.allowAlias ?? false; + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumValueOptions(): EnumValueOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const EnumValueOptions = { + encode(message: EnumValueOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(8).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumValueOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumValueOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumValueOptions { + const message = createBaseEnumValueOptions(); + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseServiceOptions(): ServiceOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const ServiceOptions = { + encode(message: ServiceOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceOptions { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ServiceOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ServiceOptions { + const message = createBaseServiceOptions(); + message.deprecated = object.deprecated ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMethodOptions(): MethodOptions { + return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] }; +} + +export const MethodOptions = { + encode(message: MethodOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + if (message.idempotencyLevel !== 0) { + writer.uint32(272).int32(message.idempotencyLevel); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 34: + message.idempotencyLevel = reader.int32() as any; + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : 0, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MethodOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.idempotencyLevel !== undefined + && (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel)); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MethodOptions { + const message = createBaseMethodOptions(); + message.deprecated = object.deprecated ?? false; + message.idempotencyLevel = object.idempotencyLevel ?? 0; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseUninterpretedOption(): UninterpretedOption { + return { + name: [], + identifierValue: "", + positiveIntValue: 0, + negativeIntValue: 0, + doubleValue: 0, + stringValue: new Uint8Array(), + aggregateValue: "", + }; +} + +export const UninterpretedOption = { + encode(message: UninterpretedOption, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.name) { + UninterpretedOption_NamePart.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.identifierValue !== "") { + writer.uint32(26).string(message.identifierValue); + } + if (message.positiveIntValue !== 0) { + writer.uint32(32).uint64(message.positiveIntValue); + } + if (message.negativeIntValue !== 0) { + writer.uint32(40).int64(message.negativeIntValue); + } + if (message.doubleValue !== 0) { + writer.uint32(49).double(message.doubleValue); + } + if (message.stringValue.length !== 0) { + writer.uint32(58).bytes(message.stringValue); + } + if (message.aggregateValue !== "") { + writer.uint32(66).string(message.aggregateValue); + } + 
return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.name.push(UninterpretedOption_NamePart.decode(reader, reader.uint32())); + break; + case 3: + message.identifierValue = reader.string(); + break; + case 4: + message.positiveIntValue = longToNumber(reader.uint64() as Long); + break; + case 5: + message.negativeIntValue = longToNumber(reader.int64() as Long); + break; + case 6: + message.doubleValue = reader.double(); + break; + case 7: + message.stringValue = reader.bytes(); + break; + case 8: + message.aggregateValue = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption { + return { + name: Array.isArray(object?.name) ? object.name.map((e: any) => UninterpretedOption_NamePart.fromJSON(e)) : [], + identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "", + positiveIntValue: isSet(object.positiveIntValue) ? Number(object.positiveIntValue) : 0, + negativeIntValue: isSet(object.negativeIntValue) ? Number(object.negativeIntValue) : 0, + doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) ? bytesFromBase64(object.stringValue) : new Uint8Array(), + aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "", + }; + }, + + toJSON(message: UninterpretedOption): unknown { + const obj: any = {}; + if (message.name) { + obj.name = message.name.map((e) => e ? 
UninterpretedOption_NamePart.toJSON(e) : undefined); + } else { + obj.name = []; + } + message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue); + message.positiveIntValue !== undefined && (obj.positiveIntValue = Math.round(message.positiveIntValue)); + message.negativeIntValue !== undefined && (obj.negativeIntValue = Math.round(message.negativeIntValue)); + message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue); + message.stringValue !== undefined + && (obj.stringValue = base64FromBytes( + message.stringValue !== undefined ? message.stringValue : new Uint8Array(), + )); + message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption { + const message = createBaseUninterpretedOption(); + message.name = object.name?.map((e) => UninterpretedOption_NamePart.fromPartial(e)) || []; + message.identifierValue = object.identifierValue ?? ""; + message.positiveIntValue = object.positiveIntValue ?? 0; + message.negativeIntValue = object.negativeIntValue ?? 0; + message.doubleValue = object.doubleValue ?? 0; + message.stringValue = object.stringValue ?? new Uint8Array(); + message.aggregateValue = object.aggregateValue ?? ""; + return message; + }, +}; + +function createBaseUninterpretedOption_NamePart(): UninterpretedOption_NamePart { + return { namePart: "", isExtension: false }; +} + +export const UninterpretedOption_NamePart = { + encode(message: UninterpretedOption_NamePart, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.namePart !== "") { + writer.uint32(10).string(message.namePart); + } + if (message.isExtension === true) { + writer.uint32(16).bool(message.isExtension); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption_NamePart { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseUninterpretedOption_NamePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.namePart = reader.string(); + break; + case 2: + message.isExtension = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption_NamePart { + return { + namePart: isSet(object.namePart) ? String(object.namePart) : "", + isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false, + }; + }, + + toJSON(message: UninterpretedOption_NamePart): unknown { + const obj: any = {}; + message.namePart !== undefined && (obj.namePart = message.namePart); + message.isExtension !== undefined && (obj.isExtension = message.isExtension); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart(); + message.namePart = object.namePart ?? ""; + message.isExtension = object.isExtension ?? false; + return message; + }, +}; + +function createBaseSourceCodeInfo(): SourceCodeInfo { + return { location: [] }; +} + +export const SourceCodeInfo = { + encode(message: SourceCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.location) { + SourceCodeInfo_Location.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.location.push(SourceCodeInfo_Location.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo { + return { + location: Array.isArray(object?.location) + ? object.location.map((e: any) => SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo): unknown { + const obj: any = {}; + if (message.location) { + obj.location = message.location.map((e) => e ? SourceCodeInfo_Location.toJSON(e) : undefined); + } else { + obj.location = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo { + const message = createBaseSourceCodeInfo(); + message.location = object.location?.map((e) => SourceCodeInfo_Location.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSourceCodeInfo_Location(): SourceCodeInfo_Location { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} + +export const SourceCodeInfo_Location = { + encode(message: SourceCodeInfo_Location, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(18).fork(); + for (const v of message.span) { + writer.int32(v); + } + writer.ldelim(); + if (message.leadingComments !== "") { + writer.uint32(26).string(message.leadingComments); + } + if (message.trailingComments !== "") { + writer.uint32(34).string(message.trailingComments); + } + for (const v of message.leadingDetachedComments) { + writer.uint32(50).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo_Location { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo_Location(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.span.push(reader.int32()); + } + } else { + message.span.push(reader.int32()); + } + break; + case 3: + message.leadingComments = reader.string(); + break; + case 4: + message.trailingComments = reader.string(); + break; + case 6: + message.leadingDetachedComments.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo_Location { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + span: Array.isArray(object?.span) ? object.span.map((e: any) => Number(e)) : [], + leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "", + trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "", + leadingDetachedComments: Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e: any) => String(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo_Location): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + if (message.span) { + obj.span = message.span.map((e) => Math.round(e)); + } else { + obj.span = []; + } + message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments); + message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments); + if (message.leadingDetachedComments) { + obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e); + } else { + obj.leadingDetachedComments = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location(); + message.path = object.path?.map((e) => e) || []; + message.span = object.span?.map((e) => e) || []; + message.leadingComments = object.leadingComments ?? ""; + message.trailingComments = object.trailingComments ?? ""; + message.leadingDetachedComments = object.leadingDetachedComments?.map((e) => e) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo(): GeneratedCodeInfo { + return { annotation: [] }; +} + +export const GeneratedCodeInfo = { + encode(message: GeneratedCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.annotation) { + GeneratedCodeInfo_Annotation.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.annotation.push(GeneratedCodeInfo_Annotation.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo { + return { + annotation: Array.isArray(object?.annotation) + ? object.annotation.map((e: any) => GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GeneratedCodeInfo): unknown { + const obj: any = {}; + if (message.annotation) { + obj.annotation = message.annotation.map((e) => e ? GeneratedCodeInfo_Annotation.toJSON(e) : undefined); + } else { + obj.annotation = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo(); + message.annotation = object.annotation?.map((e) => GeneratedCodeInfo_Annotation.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo_Annotation(): GeneratedCodeInfo_Annotation { + return { path: [], sourceFile: "", begin: 0, end: 0 }; +} + +export const GeneratedCodeInfo_Annotation = { + encode(message: GeneratedCodeInfo_Annotation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + if (message.sourceFile !== "") { + writer.uint32(18).string(message.sourceFile); + } + if (message.begin !== 0) { + writer.uint32(24).int32(message.begin); + } + if (message.end !== 0) { + writer.uint32(32).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo_Annotation { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo_Annotation(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + message.sourceFile = reader.string(); + break; + case 3: + message.begin = reader.int32(); + break; + case 4: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo_Annotation { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "", + begin: isSet(object.begin) ? Number(object.begin) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + }; + }, + + toJSON(message: GeneratedCodeInfo_Annotation): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile); + message.begin !== undefined && (obj.begin = Math.round(message.begin)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation(); + message.path = object.path?.map((e) => e) || []; + message.sourceFile = object.sourceFile ?? ""; + message.begin = object.begin ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types/zigbeealliance/distributedcomplianceledger/dclgenutil/genesis.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types/zigbeealliance/distributedcomplianceledger/dclgenutil/genesis.ts new file mode 100644 index 000000000..daf5b34e2 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types/zigbeealliance/distributedcomplianceledger/dclgenutil/genesis.ts @@ -0,0 +1,116 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.dclgenutil"; + +/** GenesisState defines the dclgenutil module's genesis state. */ +export interface GenesisState { + /** gen_txs defines the genesis transactions. */ + genTxs: Uint8Array[]; +} + +function createBaseGenesisState(): GenesisState { + return { genTxs: [] }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.genTxs) { + writer.uint32(10).bytes(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGenesisState(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.genTxs.push(reader.bytes()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GenesisState { + return { genTxs: Array.isArray(object?.genTxs) ? object.genTxs.map((e: any) => bytesFromBase64(e)) : [] }; + }, + + toJSON(message: GenesisState): unknown { + const obj: any = {}; + if (message.genTxs) { + obj.genTxs = message.genTxs.map((e) => base64FromBytes(e !== undefined ? e : new Uint8Array())); + } else { + obj.genTxs = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GenesisState { + const message = createBaseGenesisState(); + message.genTxs = object.genTxs?.map((e) => e) || []; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | 
number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types/zigbeealliance/distributedcomplianceledger/dclgenutil/query.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types/zigbeealliance/distributedcomplianceledger/dclgenutil/query.ts new file mode 100644 index 000000000..1d415fb21 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types/zigbeealliance/distributedcomplianceledger/dclgenutil/query.ts @@ -0,0 +1,17 @@ +/* eslint-disable */ +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.dclgenutil"; + +/** Query defines the gRPC querier service. */ +export interface Query { +} + +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types/zigbeealliance/distributedcomplianceledger/dclgenutil/tx.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types/zigbeealliance/distributedcomplianceledger/dclgenutil/tx.ts new file mode 100644 index 000000000..154061ea6 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclgenutil/types/zigbeealliance/distributedcomplianceledger/dclgenutil/tx.ts @@ -0,0 +1,17 @@ +/* eslint-disable */ +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.dclgenutil"; + +/** Msg defines the Msg service. 
*/ +export interface Msg { +} + +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/api.swagger.yml b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/api.swagger.yml new file mode 100644 index 000000000..adafab0d2 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/api.swagger.yml @@ -0,0 +1,1154 @@ +swagger: '2.0' +info: + title: HTTP API Console zigbeealliance.distributedcomplianceledger.dclupgrade + name: '' + description: '' +paths: + /dcl/dclupgrade/approved-upgrades: + get: + operationId: ApprovedUpgradeAll + responses: + '200': + description: A successful response. + schema: + type: object + properties: + approvedUpgrade: + type: array + items: + type: object + properties: + plan: + type: object + properties: + name: + type: string + time: + type: string + format: date-time + height: + type: string + format: int64 + info: + type: string + upgraded_client_state: + type: object + properties: + '@type': + type: string + additionalProperties: {} + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/dclupgrade/approved-upgrades/{name}: + get: + operationId: ApprovedUpgrade + responses: + '200': + description: A successful response. + schema: + type: object + properties: + approvedUpgrade: + type: object + properties: + plan: + type: object + properties: + name: + type: string + time: + type: string + format: date-time + height: + type: string + format: int64 + info: + type: string + upgraded_client_state: + type: object + properties: + '@type': + type: string + additionalProperties: {} + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: name + in: path + required: true + type: string + tags: + - Query + /dcl/dclupgrade/proposed-upgrades: + get: + operationId: ProposedUpgradeAll + responses: + '200': + description: A successful response. + schema: + type: object + properties: + proposedUpgrade: + type: array + items: + type: object + properties: + plan: + type: object + properties: + name: + type: string + time: + type: string + format: date-time + height: + type: string + format: int64 + info: + type: string + upgraded_client_state: + type: object + properties: + '@type': + type: string + additionalProperties: {} + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/dclupgrade/proposed-upgrades/{name}: + get: + operationId: ProposedUpgrade + responses: + '200': + description: A successful response. + schema: + type: object + properties: + proposedUpgrade: + type: object + properties: + plan: + type: object + properties: + name: + type: string + time: + type: string + format: date-time + height: + type: string + format: int64 + info: + type: string + upgraded_client_state: + type: object + properties: + '@type': + type: string + additionalProperties: {} + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: name + in: path + required: true + type: string + tags: + - Query + /dcl/dclupgrade/rejected-upgrades: + get: + operationId: RejectedUpgradeAll + responses: + '200': + description: A successful response. + schema: + type: object + properties: + rejectedUpgrade: + type: array + items: + type: object + properties: + plan: + type: object + properties: + name: + type: string + time: + type: string + format: date-time + height: + type: string + format: int64 + info: + type: string + upgraded_client_state: + type: object + properties: + '@type': + type: string + additionalProperties: {} + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/dclupgrade/rejected-upgrades/{name}: + get: + operationId: RejectedUpgrade + responses: + '200': + description: A successful response. + schema: + type: object + properties: + rejectedUpgrade: + type: object + properties: + plan: + type: object + properties: + name: + type: string + time: + type: string + format: date-time + height: + type: string + format: int64 + info: + type: string + upgraded_client_state: + type: object + properties: + '@type': + type: string + additionalProperties: {} + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: name + in: path + required: true + type: string + tags: + - Query +definitions: + Any: + type: object + properties: + '@type': + type: string + additionalProperties: {} + Status: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + Grant: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + PageRequest: + type: object + properties: + key: + type: string + format: byte + offset: + type: string + format: uint64 + limit: + type: string + format: uint64 + count_total: + type: boolean + reverse: + type: boolean + PageResponse: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + Plan: + type: object + properties: + name: + type: string + time: + type: string + format: date-time + height: + type: string + format: int64 + info: + type: string + upgraded_client_state: + type: object + properties: + '@type': + type: string + additionalProperties: {} + QueryAllApprovedUpgradeResponse: + type: object + properties: + approvedUpgrade: + type: array + items: + type: object + properties: + plan: + type: object + properties: + name: + type: string + time: + type: string + format: date-time + height: + type: string + format: int64 + info: + type: string + upgraded_client_state: + type: object + properties: + '@type': + type: string + additionalProperties: {} + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + type: array + 
items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllProposedUpgradeResponse: + type: object + properties: + proposedUpgrade: + type: array + items: + type: object + properties: + plan: + type: object + properties: + name: + type: string + time: + type: string + format: date-time + height: + type: string + format: int64 + info: + type: string + upgraded_client_state: + type: object + properties: + '@type': + type: string + additionalProperties: {} + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllRejectedUpgradeResponse: + type: object + properties: + rejectedUpgrade: + type: array + items: + type: object + properties: + plan: + type: object + properties: + name: + type: string + time: + type: string + format: date-time + height: + type: string + format: int64 + info: + type: string + upgraded_client_state: + type: object + properties: + '@type': + type: string + additionalProperties: {} + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + 
QueryGetApprovedUpgradeResponse: + type: object + properties: + approvedUpgrade: + type: object + properties: + plan: + type: object + properties: + name: + type: string + time: + type: string + format: date-time + height: + type: string + format: int64 + info: + type: string + upgraded_client_state: + type: object + properties: + '@type': + type: string + additionalProperties: {} + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + QueryGetProposedUpgradeResponse: + type: object + properties: + proposedUpgrade: + type: object + properties: + plan: + type: object + properties: + name: + type: string + time: + type: string + format: date-time + height: + type: string + format: int64 + info: + type: string + upgraded_client_state: + type: object + properties: + '@type': + type: string + additionalProperties: {} + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + QueryGetRejectedUpgradeResponse: + type: object + properties: + rejectedUpgrade: + type: object + properties: + plan: + type: object + properties: + name: + type: string + time: + type: string + format: date-time + height: + type: string + format: int64 + info: + type: string + upgraded_client_state: + type: object + properties: + '@type': + type: string + additionalProperties: {} + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + 
rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + dclupgrade.ApprovedUpgrade: + type: object + properties: + plan: + type: object + properties: + name: + type: string + time: + type: string + format: date-time + height: + type: string + format: int64 + info: + type: string + upgraded_client_state: + type: object + properties: + '@type': + type: string + additionalProperties: {} + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + dclupgrade.ProposedUpgrade: + type: object + properties: + plan: + type: object + properties: + name: + type: string + time: + type: string + format: date-time + height: + type: string + format: int64 + info: + type: string + upgraded_client_state: + type: object + properties: + '@type': + type: string + additionalProperties: {} + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + dclupgrade.RejectedUpgrade: + type: object + properties: + plan: + type: object + properties: + name: + type: string + time: + type: string + format: date-time + height: + type: string + format: int64 + info: + type: string + upgraded_client_state: + type: object + properties: + '@type': + type: string + additionalProperties: {} + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + 
type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + MsgApproveUpgradeResponse: + type: object + MsgProposeUpgradeResponse: + type: object + MsgRejectUpgradeResponse: + type: object diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/index.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/index.ts new file mode 100755 index 000000000..c9dfa159e --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/index.ts @@ -0,0 +1,6 @@ +import Module from './module'; +import { txClient, queryClient, registry } from './module'; +import { msgTypes } from './registry'; + +export * from "./types"; +export { Module, msgTypes, txClient, queryClient, registry }; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/module.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/module.ts new file mode 100755 index 000000000..c66dbc0d4 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/module.ts @@ -0,0 +1,203 @@ +// Generated by Ignite ignite.com/cli + +import { StdFee } from "@cosmjs/launchpad"; +import { SigningStargateClient, DeliverTxResponse } from "@cosmjs/stargate"; +import { EncodeObject, GeneratedType, OfflineSigner, Registry } from "@cosmjs/proto-signing"; +import { msgTypes } from './registry'; +import { IgniteClient } from "../client" +import { MissingWalletError } from "../helpers" +import { Api } from "./rest"; +import { MsgApproveUpgrade } from "./types/zigbeealliance/distributedcomplianceledger/dclupgrade/tx"; +import { MsgProposeUpgrade } from "./types/zigbeealliance/distributedcomplianceledger/dclupgrade/tx"; +import { MsgRejectUpgrade } from "./types/zigbeealliance/distributedcomplianceledger/dclupgrade/tx"; + +import { ApprovedUpgrade as typeApprovedUpgrade} from "./types" +import { Grant as typeGrant} from "./types" +import { 
ProposedUpgrade as typeProposedUpgrade} from "./types" +import { RejectedUpgrade as typeRejectedUpgrade} from "./types" + +export { MsgApproveUpgrade, MsgProposeUpgrade, MsgRejectUpgrade }; + +type sendMsgApproveUpgradeParams = { + value: MsgApproveUpgrade, + fee?: StdFee, + memo?: string +}; + +type sendMsgProposeUpgradeParams = { + value: MsgProposeUpgrade, + fee?: StdFee, + memo?: string +}; + +type sendMsgRejectUpgradeParams = { + value: MsgRejectUpgrade, + fee?: StdFee, + memo?: string +}; + + +type msgApproveUpgradeParams = { + value: MsgApproveUpgrade, +}; + +type msgProposeUpgradeParams = { + value: MsgProposeUpgrade, +}; + +type msgRejectUpgradeParams = { + value: MsgRejectUpgrade, +}; + + +export const registry = new Registry(msgTypes); + +type Field = { + name: string; + type: unknown; +} +function getStructure(template) { + const structure: {fields: Field[]} = { fields: [] } + for (let [key, value] of Object.entries(template)) { + let field = { name: key, type: typeof value } + structure.fields.push(field) + } + return structure +} +const defaultFee = { + amount: [], + gas: "200000", +}; + +interface TxClientOptions { + addr: string + prefix: string + signer?: OfflineSigner +} + +export const txClient = ({ signer, prefix, addr }: TxClientOptions = { addr: "http://localhost:26657", prefix: "cosmos" }) => { + + return { + + async sendMsgApproveUpgrade({ value, fee, memo }: sendMsgApproveUpgradeParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgApproveUpgrade: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgApproveUpgrade({ value: MsgApproveUpgrade.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? 
fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgApproveUpgrade: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgProposeUpgrade({ value, fee, memo }: sendMsgProposeUpgradeParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgProposeUpgrade: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgProposeUpgrade({ value: MsgProposeUpgrade.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgProposeUpgrade: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgRejectUpgrade({ value, fee, memo }: sendMsgRejectUpgradeParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgRejectUpgrade: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgRejectUpgrade({ value: MsgRejectUpgrade.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? 
fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgRejectUpgrade: Could not broadcast Tx: '+ e.message) + } + }, + + + msgApproveUpgrade({ value }: msgApproveUpgradeParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.dclupgrade.MsgApproveUpgrade", value: MsgApproveUpgrade.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgApproveUpgrade: Could not create message: ' + e.message) + } + }, + + msgProposeUpgrade({ value }: msgProposeUpgradeParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.dclupgrade.MsgProposeUpgrade", value: MsgProposeUpgrade.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgProposeUpgrade: Could not create message: ' + e.message) + } + }, + + msgRejectUpgrade({ value }: msgRejectUpgradeParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.dclupgrade.MsgRejectUpgrade", value: MsgRejectUpgrade.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgRejectUpgrade: Could not create message: ' + e.message) + } + }, + + } +}; + +interface QueryClientOptions { + addr: string +} + +export const queryClient = ({ addr: addr }: QueryClientOptions = { addr: "http://localhost:1317" }) => { + return new Api({ baseURL: addr }); +}; + +class SDKModule { + public query: ReturnType; + public tx: ReturnType; + public structure: Record; + public registry: Array<[string, GeneratedType]> = []; + + constructor(client: IgniteClient) { + + this.query = queryClient({ addr: client.env.apiURL }); + this.updateTX(client); + this.structure = { + ApprovedUpgrade: getStructure(typeApprovedUpgrade.fromPartial({})), + Grant: getStructure(typeGrant.fromPartial({})), + ProposedUpgrade: getStructure(typeProposedUpgrade.fromPartial({})), + RejectedUpgrade: getStructure(typeRejectedUpgrade.fromPartial({})), + + }; + client.on('signer-changed',(signer) => { + 
this.updateTX(client); + }) + } + updateTX(client: IgniteClient) { + const methods = txClient({ + signer: client.signer, + addr: client.env.rpcURL, + prefix: client.env.prefix ?? "cosmos", + }) + + this.tx = methods; + for (let m in methods) { + this.tx[m] = methods[m].bind(this.tx); + } + } +}; + +const Module = (test: IgniteClient) => { + return { + module: { + ZigbeeallianceDistributedcomplianceledgerDclupgrade: new SDKModule(test) + }, + registry: msgTypes + } +} +export default Module; \ No newline at end of file diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/registry.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/registry.ts new file mode 100755 index 000000000..e81fcdafd --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/registry.ts @@ -0,0 +1,13 @@ +import { GeneratedType } from "@cosmjs/proto-signing"; +import { MsgApproveUpgrade } from "./types/zigbeealliance/distributedcomplianceledger/dclupgrade/tx"; +import { MsgProposeUpgrade } from "./types/zigbeealliance/distributedcomplianceledger/dclupgrade/tx"; +import { MsgRejectUpgrade } from "./types/zigbeealliance/distributedcomplianceledger/dclupgrade/tx"; + +const msgTypes: Array<[string, GeneratedType]> = [ + ["/zigbeealliance.distributedcomplianceledger.dclupgrade.MsgApproveUpgrade", MsgApproveUpgrade], + ["/zigbeealliance.distributedcomplianceledger.dclupgrade.MsgProposeUpgrade", MsgProposeUpgrade], + ["/zigbeealliance.distributedcomplianceledger.dclupgrade.MsgRejectUpgrade", MsgRejectUpgrade], + +]; + +export { msgTypes } \ No newline at end of file diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/rest.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/rest.ts new file mode 100644 index 000000000..c97dbc523 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/rest.ts @@ -0,0 +1,596 @@ +/* eslint-disable */ +/* tslint:disable */ +/* + * 
--------------------------------------------------------------- + * ## THIS FILE WAS GENERATED VIA SWAGGER-TYPESCRIPT-API ## + * ## ## + * ## AUTHOR: acacode ## + * ## SOURCE: https://github.com/acacode/swagger-typescript-api ## + * --------------------------------------------------------------- + */ + +export interface DclupgradeGrant { + address?: string; + + /** + * number of nanoseconds elapsed since January 1, 1970 UTC + * @format int64 + */ + time?: string; + info?: string; +} + +export type DclupgradeMsgApproveUpgradeResponse = object; + +export type DclupgradeMsgProposeUpgradeResponse = object; + +export type DclupgradeMsgRejectUpgradeResponse = object; + +export interface DclupgradeQueryAllApprovedUpgradeResponse { + approvedUpgrade?: DistributedcomplianceledgerdclupgradeApprovedUpgrade[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface DclupgradeQueryAllProposedUpgradeResponse { + proposedUpgrade?: DistributedcomplianceledgerdclupgradeProposedUpgrade[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface DclupgradeQueryAllRejectedUpgradeResponse { + rejectedUpgrade?: DistributedcomplianceledgerdclupgradeRejectedUpgrade[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. 
+ * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface DclupgradeQueryGetApprovedUpgradeResponse { + approvedUpgrade?: DistributedcomplianceledgerdclupgradeApprovedUpgrade; +} + +export interface DclupgradeQueryGetProposedUpgradeResponse { + proposedUpgrade?: DistributedcomplianceledgerdclupgradeProposedUpgrade; +} + +export interface DclupgradeQueryGetRejectedUpgradeResponse { + rejectedUpgrade?: DistributedcomplianceledgerdclupgradeRejectedUpgrade; +} + +export interface DistributedcomplianceledgerdclupgradeApprovedUpgrade { + /** Plan specifies information about a planned upgrade and when it should occur. */ + plan?: V1Beta1Plan; + creator?: string; + approvals?: DclupgradeGrant[]; + rejects?: DclupgradeGrant[]; +} + +export interface DistributedcomplianceledgerdclupgradeProposedUpgrade { + /** Plan specifies information about a planned upgrade and when it should occur. */ + plan?: V1Beta1Plan; + creator?: string; + approvals?: DclupgradeGrant[]; + rejects?: DclupgradeGrant[]; +} + +export interface DistributedcomplianceledgerdclupgradeRejectedUpgrade { + /** Plan specifies information about a planned upgrade and when it should occur. */ + plan?: V1Beta1Plan; + creator?: string; + approvals?: DclupgradeGrant[]; + rejects?: DclupgradeGrant[]; +} + +/** +* `Any` contains an arbitrary serialized protocol buffer message along with a +URL that describes the type of the serialized message. + +Protobuf library provides support to pack/unpack Any values in the form +of utility functions or additional generated methods of the Any type. + +Example 1: Pack and unpack a message in C++. + + Foo foo = ...; + Any any; + any.PackFrom(foo); + ... + if (any.UnpackTo(&foo)) { + ... + } + +Example 2: Pack and unpack a message in Java. + + Foo foo = ...; + Any any = Any.pack(foo); + ... 
+ if (any.is(Foo.class)) { + foo = any.unpack(Foo.class); + } + + Example 3: Pack and unpack a message in Python. + + foo = Foo(...) + any = Any() + any.Pack(foo) + ... + if any.Is(Foo.DESCRIPTOR): + any.Unpack(foo) + ... + + Example 4: Pack and unpack a message in Go + + foo := &pb.Foo{...} + any, err := anypb.New(foo) + if err != nil { + ... + } + ... + foo := &pb.Foo{} + if err := any.UnmarshalTo(foo); err != nil { + ... + } + +The pack methods provided by protobuf library will by default use +'type.googleapis.com/full.type.name' as the type URL and the unpack +methods only use the fully qualified type name after the last '/' +in the type URL, for example "foo.bar.com/x/y.z" will yield type +name "y.z". + + +JSON +==== +The JSON representation of an `Any` value uses the regular +representation of the deserialized, embedded message, with an +additional field `@type` which contains the type URL. Example: + + package google.profile; + message Person { + string first_name = 1; + string last_name = 2; + } + + { + "@type": "type.googleapis.com/google.profile.Person", + "firstName": , + "lastName": + } + +If the embedded message type is well-known and has a custom JSON +representation, that representation will be embedded adding a field +`value` which holds the custom JSON in addition to the `@type` +field. Example (for message [google.protobuf.Duration][]): + + { + "@type": "type.googleapis.com/google.protobuf.Duration", + "value": "1.212s" + } +*/ +export interface ProtobufAny { + /** + * A URL/resource name that uniquely identifies the type of the serialized + * protocol buffer message. This string must contain at least + * one "/" character. The last segment of the URL's path must represent + * the fully qualified name of the type (as in + * `path/google.protobuf.Duration`). The name should be in a canonical form + * (e.g., leading "." is not accepted). 
+ * + * In practice, teams usually precompile into the binary all types that they + * expect it to use in the context of Any. However, for URLs which use the + * scheme `http`, `https`, or no scheme, one can optionally set up a type + * server that maps type URLs to message definitions as follows: + * * If no scheme is provided, `https` is assumed. + * * An HTTP GET on the URL must yield a [google.protobuf.Type][] + * value in binary format, or produce an error. + * * Applications are allowed to cache lookup results based on the + * URL, or have them precompiled into a binary to avoid any + * lookup. Therefore, binary compatibility needs to be preserved + * on changes to types. (Use versioned type names to manage + * breaking changes.) + * Note: this functionality is not currently available in the official + * protobuf release, and it is not used for type URLs beginning with + * type.googleapis.com. + * Schemes other than `http`, `https` (or the empty scheme) might be + * used with implementation specific semantics. + */ + "@type"?: string; +} + +export interface RpcStatus { + /** @format int32 */ + code?: number; + message?: string; + details?: ProtobufAny[]; +} + +/** +* message SomeRequest { + Foo some_parameter = 1; + PageRequest pagination = 2; + } +*/ +export interface V1Beta1PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. + * @format byte + */ + key?: string; + + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. + * @format uint64 + */ + offset?: string; + + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. 
+ * @format uint64 + */ + limit?: string; + + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. + */ + count_total?: boolean; + + /** + * reverse is set to true if results are to be returned in the descending order. + * + * Since: cosmos-sdk 0.43 + */ + reverse?: boolean; +} + +/** +* PageResponse is to be embedded in gRPC response messages where the +corresponding request message has used PageRequest. + + message SomeResponse { + repeated Bar results = 1; + PageResponse page = 2; + } +*/ +export interface V1Beta1PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. + * @format byte + */ + next_key?: string; + + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + * @format uint64 + */ + total?: string; +} + +/** + * Plan specifies information about a planned upgrade and when it should occur. + */ +export interface V1Beta1Plan { + /** + * Sets the name for the upgrade. This name will be used by the upgraded + * version of the software to apply any special "on-upgrade" commands during + * the first BeginBlock method after the upgrade is applied. It is also used + * to detect whether a software version can handle a given upgrade. If no + * upgrade handler with this name has been set in the software, it will be + * assumed that the software is out-of-date when the upgrade Time or Height is + * reached and the software will exit. + */ + name?: string; + + /** + * Deprecated: Time based upgrades have been deprecated. Time based upgrade logic + * has been removed from the SDK. + * If this field is not empty, an error will be thrown. 
+ * @format date-time + */ + time?: string; + + /** + * The height at which the upgrade must be performed. + * @format int64 + */ + height?: string; + + /** + * Any application specific upgrade info to be included on-chain + * such as a git commit that validators could automatically upgrade to + */ + info?: string; + + /** + * Deprecated: UpgradedClientState field has been deprecated. IBC upgrade logic has been + * moved to the IBC module in the sub module 02-client. + * If this field is not empty, an error will be thrown. + */ + upgraded_client_state?: ProtobufAny; +} + +import axios, { AxiosInstance, AxiosRequestConfig, AxiosResponse, ResponseType } from "axios"; + +export type QueryParamsType = Record; + +export interface FullRequestParams extends Omit { + /** set parameter to `true` for call `securityWorker` for this request */ + secure?: boolean; + /** request path */ + path: string; + /** content type of request body */ + type?: ContentType; + /** query params */ + query?: QueryParamsType; + /** format of response (i.e. 
response.json() -> format: "json") */ + format?: ResponseType; + /** request body */ + body?: unknown; +} + +export type RequestParams = Omit; + +export interface ApiConfig extends Omit { + securityWorker?: ( + securityData: SecurityDataType | null, + ) => Promise | AxiosRequestConfig | void; + secure?: boolean; + format?: ResponseType; +} + +export enum ContentType { + Json = "application/json", + FormData = "multipart/form-data", + UrlEncoded = "application/x-www-form-urlencoded", +} + +export class HttpClient { + public instance: AxiosInstance; + private securityData: SecurityDataType | null = null; + private securityWorker?: ApiConfig["securityWorker"]; + private secure?: boolean; + private format?: ResponseType; + + constructor({ securityWorker, secure, format, ...axiosConfig }: ApiConfig = {}) { + this.instance = axios.create({ ...axiosConfig, baseURL: axiosConfig.baseURL || "" }); + this.secure = secure; + this.format = format; + this.securityWorker = securityWorker; + } + + public setSecurityData = (data: SecurityDataType | null) => { + this.securityData = data; + }; + + private mergeRequestParams(params1: AxiosRequestConfig, params2?: AxiosRequestConfig): AxiosRequestConfig { + return { + ...this.instance.defaults, + ...params1, + ...(params2 || {}), + headers: { + ...(this.instance.defaults.headers || {}), + ...(params1.headers || {}), + ...((params2 && params2.headers) || {}), + }, + }; + } + + private createFormData(input: Record): FormData { + return Object.keys(input || {}).reduce((formData, key) => { + const property = input[key]; + formData.append( + key, + property instanceof Blob + ? property + : typeof property === "object" && property !== null + ? JSON.stringify(property) + : `${property}`, + ); + return formData; + }, new FormData()); + } + + public request = async ({ + secure, + path, + type, + query, + format, + body, + ...params + }: FullRequestParams): Promise> => { + const secureParams = + ((typeof secure === "boolean" ? 
secure : this.secure) && + this.securityWorker && + (await this.securityWorker(this.securityData))) || + {}; + const requestParams = this.mergeRequestParams(params, secureParams); + const responseFormat = (format && this.format) || void 0; + + if (type === ContentType.FormData && body && body !== null && typeof body === "object") { + requestParams.headers.common = { Accept: "*/*" }; + requestParams.headers.post = {}; + requestParams.headers.put = {}; + + body = this.createFormData(body as Record); + } + + return this.instance.request({ + ...requestParams, + headers: { + ...(type && type !== ContentType.FormData ? { "Content-Type": type } : {}), + ...(requestParams.headers || {}), + }, + params: query, + responseType: responseFormat, + data: body, + url: path, + }); + }; +} + +/** + * @title zigbeealliance/distributedcomplianceledger/dclupgrade/approved_upgrade.proto + * @version version not set + */ +export class Api extends HttpClient { + /** + * No description + * + * @tags Query + * @name QueryApprovedUpgradeAll + * @summary Queries a list of ApprovedUpgrade items. + * @request GET:/dcl/dclupgrade/approved-upgrades + */ + queryApprovedUpgradeAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/dclupgrade/approved-upgrades`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryApprovedUpgrade + * @summary Queries a ApprovedUpgrade by index. 
+ * @request GET:/dcl/dclupgrade/approved-upgrades/{name} + */ + queryApprovedUpgrade = (name: string, params: RequestParams = {}) => + this.request({ + path: `/dcl/dclupgrade/approved-upgrades/${name}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryProposedUpgradeAll + * @summary Queries a list of ProposedUpgrade items. + * @request GET:/dcl/dclupgrade/proposed-upgrades + */ + queryProposedUpgradeAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/dclupgrade/proposed-upgrades`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryProposedUpgrade + * @summary Queries a ProposedUpgrade by index. + * @request GET:/dcl/dclupgrade/proposed-upgrades/{name} + */ + queryProposedUpgrade = (name: string, params: RequestParams = {}) => + this.request({ + path: `/dcl/dclupgrade/proposed-upgrades/${name}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryRejectedUpgradeAll + * @summary Queries a list of RejectedUpgrade items. + * @request GET:/dcl/dclupgrade/rejected-upgrades + */ + queryRejectedUpgradeAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/dclupgrade/rejected-upgrades`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryRejectedUpgrade + * @summary Queries a RejectedUpgrade by index. 
+ * @request GET:/dcl/dclupgrade/rejected-upgrades/{name} + */ + queryRejectedUpgrade = (name: string, params: RequestParams = {}) => + this.request({ + path: `/dcl/dclupgrade/rejected-upgrades/${name}`, + method: "GET", + format: "json", + ...params, + }); +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types.ts new file mode 100755 index 000000000..f50edd0a8 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types.ts @@ -0,0 +1,13 @@ +import { ApprovedUpgrade } from "./types/zigbeealliance/distributedcomplianceledger/dclupgrade/approved_upgrade" +import { Grant } from "./types/zigbeealliance/distributedcomplianceledger/dclupgrade/grant" +import { ProposedUpgrade } from "./types/zigbeealliance/distributedcomplianceledger/dclupgrade/proposed_upgrade" +import { RejectedUpgrade } from "./types/zigbeealliance/distributedcomplianceledger/dclupgrade/rejected_upgrade" + + +export { + ApprovedUpgrade, + Grant, + ProposedUpgrade, + RejectedUpgrade, + + } \ No newline at end of file diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/amino/amino.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/amino/amino.ts new file mode 100644 index 000000000..4b67dcfe4 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/amino/amino.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "amino"; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/cosmos/base/query/v1beta1/pagination.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/cosmos/base/query/v1beta1/pagination.ts new file mode 100644 index 000000000..a31ca249d --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/cosmos/base/query/v1beta1/pagination.ts @@ -0,0 +1,286 @@ +/* eslint-disable */ 
+import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos.base.query.v1beta1"; + +/** + * PageRequest is to be embedded in gRPC request messages for efficient + * pagination. Ex: + * + * message SomeRequest { + * Foo some_parameter = 1; + * PageRequest pagination = 2; + * } + */ +export interface PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. + */ + key: Uint8Array; + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. + */ + offset: number; + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. + */ + limit: number; + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. + */ + countTotal: boolean; + /** + * reverse is set to true if results are to be returned in the descending order. + * + * Since: cosmos-sdk 0.43 + */ + reverse: boolean; +} + +/** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ +export interface PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. 
+ */ + nextKey: Uint8Array; + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + */ + total: number; +} + +function createBasePageRequest(): PageRequest { + return { key: new Uint8Array(), offset: 0, limit: 0, countTotal: false, reverse: false }; +} + +export const PageRequest = { + encode(message: PageRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + if (message.offset !== 0) { + writer.uint32(16).uint64(message.offset); + } + if (message.limit !== 0) { + writer.uint32(24).uint64(message.limit); + } + if (message.countTotal === true) { + writer.uint32(32).bool(message.countTotal); + } + if (message.reverse === true) { + writer.uint32(40).bool(message.reverse); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PageRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePageRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + case 2: + message.offset = longToNumber(reader.uint64() as Long); + break; + case 3: + message.limit = longToNumber(reader.uint64() as Long); + break; + case 4: + message.countTotal = reader.bool(); + break; + case 5: + message.reverse = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PageRequest { + return { + key: isSet(object.key) ? bytesFromBase64(object.key) : new Uint8Array(), + offset: isSet(object.offset) ? Number(object.offset) : 0, + limit: isSet(object.limit) ? Number(object.limit) : 0, + countTotal: isSet(object.countTotal) ? Boolean(object.countTotal) : false, + reverse: isSet(object.reverse) ? 
Boolean(object.reverse) : false, + }; + }, + + toJSON(message: PageRequest): unknown { + const obj: any = {}; + message.key !== undefined + && (obj.key = base64FromBytes(message.key !== undefined ? message.key : new Uint8Array())); + message.offset !== undefined && (obj.offset = Math.round(message.offset)); + message.limit !== undefined && (obj.limit = Math.round(message.limit)); + message.countTotal !== undefined && (obj.countTotal = message.countTotal); + message.reverse !== undefined && (obj.reverse = message.reverse); + return obj; + }, + + fromPartial, I>>(object: I): PageRequest { + const message = createBasePageRequest(); + message.key = object.key ?? new Uint8Array(); + message.offset = object.offset ?? 0; + message.limit = object.limit ?? 0; + message.countTotal = object.countTotal ?? false; + message.reverse = object.reverse ?? false; + return message; + }, +}; + +function createBasePageResponse(): PageResponse { + return { nextKey: new Uint8Array(), total: 0 }; +} + +export const PageResponse = { + encode(message: PageResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.nextKey.length !== 0) { + writer.uint32(10).bytes(message.nextKey); + } + if (message.total !== 0) { + writer.uint32(16).uint64(message.total); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PageResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePageResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.nextKey = reader.bytes(); + break; + case 2: + message.total = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PageResponse { + return { + nextKey: isSet(object.nextKey) ? 
bytesFromBase64(object.nextKey) : new Uint8Array(), + total: isSet(object.total) ? Number(object.total) : 0, + }; + }, + + toJSON(message: PageResponse): unknown { + const obj: any = {}; + message.nextKey !== undefined + && (obj.nextKey = base64FromBytes(message.nextKey !== undefined ? message.nextKey : new Uint8Array())); + message.total !== undefined && (obj.total = Math.round(message.total)); + return obj; + }, + + fromPartial, I>>(object: I): PageResponse { + const message = createBasePageResponse(); + message.nextKey = object.nextKey ?? new Uint8Array(); + message.total = object.total ?? 0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? 
{ [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/cosmos/upgrade/v1beta1/upgrade.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/cosmos/upgrade/v1beta1/upgrade.ts new file mode 100644 index 000000000..e578e0a38 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/cosmos/upgrade/v1beta1/upgrade.ts @@ -0,0 +1,431 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { Any } from "../../../google/protobuf/any"; +import { Timestamp } from "../../../google/protobuf/timestamp"; + +export const protobufPackage = "cosmos.upgrade.v1beta1"; + +/** Plan specifies information about a planned upgrade and when it should occur. */ +export interface Plan { + /** + * Sets the name for the upgrade. This name will be used by the upgraded + * version of the software to apply any special "on-upgrade" commands during + * the first BeginBlock method after the upgrade is applied. It is also used + * to detect whether a software version can handle a given upgrade. If no + * upgrade handler with this name has been set in the software, it will be + * assumed that the software is out-of-date when the upgrade Time or Height is + * reached and the software will exit. + */ + name: string; + /** + * Deprecated: Time based upgrades have been deprecated. 
Time based upgrade logic + * has been removed from the SDK. + * If this field is not empty, an error will be thrown. + * + * @deprecated + */ + time: + | Date + | undefined; + /** The height at which the upgrade must be performed. */ + height: number; + /** + * Any application specific upgrade info to be included on-chain + * such as a git commit that validators could automatically upgrade to + */ + info: string; + /** + * Deprecated: UpgradedClientState field has been deprecated. IBC upgrade logic has been + * moved to the IBC module in the sub module 02-client. + * If this field is not empty, an error will be thrown. + * + * @deprecated + */ + upgradedClientState: Any | undefined; +} + +/** + * SoftwareUpgradeProposal is a gov Content type for initiating a software + * upgrade. + * Deprecated: This legacy proposal is deprecated in favor of Msg-based gov + * proposals, see MsgSoftwareUpgrade. + * + * @deprecated + */ +export interface SoftwareUpgradeProposal { + /** title of the proposal */ + title: string; + /** description of the proposal */ + description: string; + /** plan of the proposal */ + plan: Plan | undefined; +} + +/** + * CancelSoftwareUpgradeProposal is a gov Content type for cancelling a software + * upgrade. + * Deprecated: This legacy proposal is deprecated in favor of Msg-based gov + * proposals, see MsgCancelUpgrade. + * + * @deprecated + */ +export interface CancelSoftwareUpgradeProposal { + /** title of the proposal */ + title: string; + /** description of the proposal */ + description: string; +} + +/** + * ModuleVersion specifies a module and its consensus version. 
+ * + * Since: cosmos-sdk 0.43 + */ +export interface ModuleVersion { + /** name of the app module */ + name: string; + /** consensus version of the app module */ + version: number; +} + +function createBasePlan(): Plan { + return { name: "", time: undefined, height: 0, info: "", upgradedClientState: undefined }; +} + +export const Plan = { + encode(message: Plan, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.time !== undefined) { + Timestamp.encode(toTimestamp(message.time), writer.uint32(18).fork()).ldelim(); + } + if (message.height !== 0) { + writer.uint32(24).int64(message.height); + } + if (message.info !== "") { + writer.uint32(34).string(message.info); + } + if (message.upgradedClientState !== undefined) { + Any.encode(message.upgradedClientState, writer.uint32(42).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Plan { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePlan(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.time = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + break; + case 3: + message.height = longToNumber(reader.int64() as Long); + break; + case 4: + message.info = reader.string(); + break; + case 5: + message.upgradedClientState = Any.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Plan { + return { + name: isSet(object.name) ? String(object.name) : "", + time: isSet(object.time) ? fromJsonTimestamp(object.time) : undefined, + height: isSet(object.height) ? Number(object.height) : 0, + info: isSet(object.info) ? 
String(object.info) : "", + upgradedClientState: isSet(object.upgradedClientState) ? Any.fromJSON(object.upgradedClientState) : undefined, + }; + }, + + toJSON(message: Plan): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.time !== undefined && (obj.time = message.time.toISOString()); + message.height !== undefined && (obj.height = Math.round(message.height)); + message.info !== undefined && (obj.info = message.info); + message.upgradedClientState !== undefined + && (obj.upgradedClientState = message.upgradedClientState ? Any.toJSON(message.upgradedClientState) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): Plan { + const message = createBasePlan(); + message.name = object.name ?? ""; + message.time = object.time ?? undefined; + message.height = object.height ?? 0; + message.info = object.info ?? ""; + message.upgradedClientState = (object.upgradedClientState !== undefined && object.upgradedClientState !== null) + ? Any.fromPartial(object.upgradedClientState) + : undefined; + return message; + }, +}; + +function createBaseSoftwareUpgradeProposal(): SoftwareUpgradeProposal { + return { title: "", description: "", plan: undefined }; +} + +export const SoftwareUpgradeProposal = { + encode(message: SoftwareUpgradeProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + if (message.plan !== undefined) { + Plan.encode(message.plan, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SoftwareUpgradeProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSoftwareUpgradeProposal(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + case 3: + message.plan = Plan.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SoftwareUpgradeProposal { + return { + title: isSet(object.title) ? String(object.title) : "", + description: isSet(object.description) ? String(object.description) : "", + plan: isSet(object.plan) ? Plan.fromJSON(object.plan) : undefined, + }; + }, + + toJSON(message: SoftwareUpgradeProposal): unknown { + const obj: any = {}; + message.title !== undefined && (obj.title = message.title); + message.description !== undefined && (obj.description = message.description); + message.plan !== undefined && (obj.plan = message.plan ? Plan.toJSON(message.plan) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): SoftwareUpgradeProposal { + const message = createBaseSoftwareUpgradeProposal(); + message.title = object.title ?? ""; + message.description = object.description ?? ""; + message.plan = (object.plan !== undefined && object.plan !== null) ? 
Plan.fromPartial(object.plan) : undefined; + return message; + }, +}; + +function createBaseCancelSoftwareUpgradeProposal(): CancelSoftwareUpgradeProposal { + return { title: "", description: "" }; +} + +export const CancelSoftwareUpgradeProposal = { + encode(message: CancelSoftwareUpgradeProposal, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.title !== "") { + writer.uint32(10).string(message.title); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CancelSoftwareUpgradeProposal { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCancelSoftwareUpgradeProposal(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): CancelSoftwareUpgradeProposal { + return { + title: isSet(object.title) ? String(object.title) : "", + description: isSet(object.description) ? String(object.description) : "", + }; + }, + + toJSON(message: CancelSoftwareUpgradeProposal): unknown { + const obj: any = {}; + message.title !== undefined && (obj.title = message.title); + message.description !== undefined && (obj.description = message.description); + return obj; + }, + + fromPartial, I>>( + object: I, + ): CancelSoftwareUpgradeProposal { + const message = createBaseCancelSoftwareUpgradeProposal(); + message.title = object.title ?? ""; + message.description = object.description ?? 
""; + return message; + }, +}; + +function createBaseModuleVersion(): ModuleVersion { + return { name: "", version: 0 }; +} + +export const ModuleVersion = { + encode(message: ModuleVersion, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.version !== 0) { + writer.uint32(16).uint64(message.version); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ModuleVersion { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseModuleVersion(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.version = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ModuleVersion { + return { + name: isSet(object.name) ? String(object.name) : "", + version: isSet(object.version) ? Number(object.version) : 0, + }; + }, + + toJSON(message: ModuleVersion): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.version !== undefined && (obj.version = Math.round(message.version)); + return obj; + }, + + fromPartial, I>>(object: I): ModuleVersion { + const message = createBaseModuleVersion(); + message.name = object.name ?? ""; + message.version = object.version ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function toTimestamp(date: Date): Timestamp { + const seconds = date.getTime() / 1_000; + const nanos = (date.getTime() % 1_000) * 1_000_000; + return { seconds, nanos }; +} + +function fromTimestamp(t: Timestamp): Date { + let millis = t.seconds * 1_000; + millis += t.nanos / 1_000_000; + return new Date(millis); +} + +function fromJsonTimestamp(o: any): Date { + if (o instanceof Date) { + return o; + } else if (typeof o === "string") { + return new Date(o); + } else { + return fromTimestamp(Timestamp.fromJSON(o)); + } +} + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/cosmos_proto/cosmos.ts 
b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/cosmos_proto/cosmos.ts new file mode 100644 index 000000000..79c8d3486 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/cosmos_proto/cosmos.ts @@ -0,0 +1,247 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos_proto"; + +export enum ScalarType { + SCALAR_TYPE_UNSPECIFIED = 0, + SCALAR_TYPE_STRING = 1, + SCALAR_TYPE_BYTES = 2, + UNRECOGNIZED = -1, +} + +export function scalarTypeFromJSON(object: any): ScalarType { + switch (object) { + case 0: + case "SCALAR_TYPE_UNSPECIFIED": + return ScalarType.SCALAR_TYPE_UNSPECIFIED; + case 1: + case "SCALAR_TYPE_STRING": + return ScalarType.SCALAR_TYPE_STRING; + case 2: + case "SCALAR_TYPE_BYTES": + return ScalarType.SCALAR_TYPE_BYTES; + case -1: + case "UNRECOGNIZED": + default: + return ScalarType.UNRECOGNIZED; + } +} + +export function scalarTypeToJSON(object: ScalarType): string { + switch (object) { + case ScalarType.SCALAR_TYPE_UNSPECIFIED: + return "SCALAR_TYPE_UNSPECIFIED"; + case ScalarType.SCALAR_TYPE_STRING: + return "SCALAR_TYPE_STRING"; + case ScalarType.SCALAR_TYPE_BYTES: + return "SCALAR_TYPE_BYTES"; + case ScalarType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * InterfaceDescriptor describes an interface type to be used with + * accepts_interface and implements_interface and declared by declare_interface. + */ +export interface InterfaceDescriptor { + /** + * name is the name of the interface. It should be a short-name (without + * a period) such that the fully qualified name of the interface will be + * package.name, ex. for the package a.b and interface named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the interface and its + * purpose. 
+ */ + description: string; +} + +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. + * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. + * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ +export interface ScalarDescriptor { + /** + * name is the name of the scalar. It should be a short-name (without + * a period) such that the fully qualified name of the scalar will be + * package.name, ex. for the package a.b and scalar named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the scalar and its + * encoding format. For instance a big integer or decimal scalar should + * specify precisely the expected encoding format. + */ + description: string; + /** + * field_type is the type of field with which this scalar can be used. + * Scalars can be used with one and only one type of field so that + * encoding standards and simple and clear. Currently only string and + * bytes fields are supported for scalars. + */ + fieldType: ScalarType[]; +} + +function createBaseInterfaceDescriptor(): InterfaceDescriptor { + return { name: "", description: "" }; +} + +export const InterfaceDescriptor = { + encode(message: InterfaceDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseInterfaceDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): InterfaceDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + }; + }, + + toJSON(message: InterfaceDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + return obj; + }, + + fromPartial, I>>(object: I): InterfaceDescriptor { + const message = createBaseInterfaceDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? ""; + return message; + }, +}; + +function createBaseScalarDescriptor(): ScalarDescriptor { + return { name: "", description: "", fieldType: [] }; +} + +export const ScalarDescriptor = { + encode(message: ScalarDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + writer.uint32(26).fork(); + for (const v of message.fieldType) { + writer.int32(v); + } + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ScalarDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseScalarDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.fieldType.push(reader.int32() as any); + } + } else { + message.fieldType.push(reader.int32() as any); + } + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ScalarDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + fieldType: Array.isArray(object?.fieldType) ? object.fieldType.map((e: any) => scalarTypeFromJSON(e)) : [], + }; + }, + + toJSON(message: ScalarDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + if (message.fieldType) { + obj.fieldType = message.fieldType.map((e) => scalarTypeToJSON(e)); + } else { + obj.fieldType = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ScalarDescriptor { + const message = createBaseScalarDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? ""; + message.fieldType = object.fieldType?.map((e) => e) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/gogoproto/gogo.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/gogoproto/gogo.ts new file mode 100644 index 000000000..3f41a047a --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/gogoproto/gogo.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "gogoproto"; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/google/api/annotations.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/google/api/annotations.ts new file mode 100644 index 000000000..aace4787f --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/google/api/annotations.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "google.api"; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/google/api/http.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/google/api/http.ts new file mode 100644 index 000000000..17e770d15 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/google/api/http.ts @@ -0,0 +1,589 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.api"; + +/** + * Defines the HTTP configuration for an API service. It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface Http { + /** + * A list of HTTP configuration rules that apply to individual API methods. + * + * **NOTE:** All service configuration rules follow "last one wins" order. 
+ */ + rules: HttpRule[]; + /** + * When set to true, URL path parmeters will be fully URI-decoded except in + * cases of single segment matches in reserved expansion, where "%2F" will be + * left encoded. + * + * The default behavior is to not decode RFC 6570 reserved characters in multi + * segment matches. + */ + fullyDecodeReservedExpansion: boolean; +} + +/** + * `HttpRule` defines the mapping of an RPC method to one or more HTTP + * REST API methods. The mapping specifies how different portions of the RPC + * request message are mapped to URL path, URL query parameters, and + * HTTP request body. The mapping is typically specified as an + * `google.api.http` annotation on the RPC method, + * see "google/api/annotations.proto" for details. + * + * The mapping consists of a field specifying the path template and + * method kind. The path template can refer to fields in the request + * message, as in the example below which describes a REST GET + * operation on a resource collection of messages: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}/{sub.subfield}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * SubMessage sub = 2; // `sub.subfield` is url-mapped + * } + * message Message { + * string text = 1; // content of the resource + * } + * + * The same http annotation can alternatively be expressed inside the + * `GRPC API Configuration` YAML file. + * + * http: + * rules: + * - selector: .Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * This definition enables an automatic, bidrectional mapping of HTTP + * JSON to RPC. 
Example: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456/foo` | `GetMessage(message_id: "123456" sub: SubMessage(subfield: "foo"))` + * + * In general, not only fields but also field paths can be referenced + * from a path pattern. Fields mapped to the path pattern cannot be + * repeated and must have a primitive (non-message) type. + * + * Any fields in the request message which are not bound by the path + * pattern automatically become (optional) HTTP query + * parameters. Assume the following definition of the request message: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * int64 revision = 2; // becomes a parameter + * SubMessage sub = 3; // `sub.subfield` becomes a parameter + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456?revision=2&sub.subfield=foo` | `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: "foo"))` + * + * Note that fields which are mapped to HTTP parameters must have a + * primitive type or a repeated primitive type. Message types are not + * allowed. In the case of a repeated type, the parameter can be + * repeated in the URL, as in `...?param=A¶m=B`. + * + * For HTTP method kinds which allow a request body, the `body` field + * specifies the mapping. 
Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * The following HTTP JSON to RPC mapping is enabled: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice of + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. 
Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC + * mappings: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` + * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: "123456")` + * + * # Rules for HTTP mapping + * + * The rules for mapping HTTP path, query parameters, and body fields + * to the request message are as follows: + * + * 1. The `body` field specifies either `*` or a field path, or is + * omitted. If omitted, it indicates there is no HTTP request body. + * 2. Leaf fields (recursive expansion of nested messages in the + * request) can be classified into three types: + * (a) Matched in the URL template. + * (b) Covered by body (if body is `*`, everything except (a) fields; + * else everything under the body field) + * (c) All other fields. + * 3. URL query parameters found in the HTTP request are mapped to (c) fields. + * 4. Any body sent with an HTTP request can contain only (b) fields. + * + * The syntax of the path template is as follows: + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single path segment. The syntax `**` matches zero + * or more path segments, which must be the last part of the path except the + * `Verb`. The syntax `LITERAL` matches literal text in the path. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. 
A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path, all characters + * except `[-_.~0-9a-zA-Z]` are percent-encoded. Such variables show up in the + * Discovery Document as `{var}`. + * + * If a variable contains one or more path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path, all + * characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. Such variables + * show up in the Discovery Document as `{+var}`. + * + * NOTE: While the single segment variable matches the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 + * Simple String Expansion, the multi segment variable **does not** match + * RFC 6570 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. + * + * NOTE: the field paths in variables and in the `body` must not refer to + * repeated fields or map fields. + */ +export interface HttpRule { + /** + * Selects methods to which this rule applies. + * + * Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + */ + selector: string; + /** Used for listing and getting information about resources. */ + get: + | string + | undefined; + /** Used for updating a resource. */ + put: + | string + | undefined; + /** Used for creating a resource. */ + post: + | string + | undefined; + /** Used for deleting a resource. */ + delete: + | string + | undefined; + /** Used for updating a resource. 
*/ + patch: + | string + | undefined; + /** + * The custom pattern is used for specifying an HTTP method that is not + * included in the `pattern` field, such as HEAD, or "*" to leave the + * HTTP method unspecified for this rule. The wild-card rule is useful + * for services that provide content to Web (HTML) clients. + */ + custom: + | CustomHttpPattern + | undefined; + /** + * The name of the request field whose value is mapped to the HTTP body, or + * `*` for mapping all fields not captured by the path pattern to the HTTP + * body. NOTE: the referred field must not be a repeated field and must be + * present at the top-level of request message type. + */ + body: string; + /** + * Optional. The name of the response field whose value is mapped to the HTTP + * body of response. Other response fields are ignored. When + * not set, the response message will be used as HTTP body of response. + */ + responseBody: string; + /** + * Additional HTTP bindings for the selector. Nested bindings must + * not contain an `additional_bindings` field themselves (that is, + * the nesting may only be one level deep). + */ + additionalBindings: HttpRule[]; +} + +/** A custom pattern is used for defining custom HTTP verb. */ +export interface CustomHttpPattern { + /** The name of this custom HTTP verb. */ + kind: string; + /** The path matched by this custom verb. */ + path: string; +} + +function createBaseHttp(): Http { + return { rules: [], fullyDecodeReservedExpansion: false }; +} + +export const Http = { + encode(message: Http, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rules) { + HttpRule.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.fullyDecodeReservedExpansion === true) { + writer.uint32(16).bool(message.fullyDecodeReservedExpansion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Http { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rules.push(HttpRule.decode(reader, reader.uint32())); + break; + case 2: + message.fullyDecodeReservedExpansion = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Http { + return { + rules: Array.isArray(object?.rules) ? object.rules.map((e: any) => HttpRule.fromJSON(e)) : [], + fullyDecodeReservedExpansion: isSet(object.fullyDecodeReservedExpansion) + ? Boolean(object.fullyDecodeReservedExpansion) + : false, + }; + }, + + toJSON(message: Http): unknown { + const obj: any = {}; + if (message.rules) { + obj.rules = message.rules.map((e) => e ? HttpRule.toJSON(e) : undefined); + } else { + obj.rules = []; + } + message.fullyDecodeReservedExpansion !== undefined + && (obj.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion); + return obj; + }, + + fromPartial, I>>(object: I): Http { + const message = createBaseHttp(); + message.rules = object.rules?.map((e) => HttpRule.fromPartial(e)) || []; + message.fullyDecodeReservedExpansion = object.fullyDecodeReservedExpansion ?? 
false; + return message; + }, +}; + +function createBaseHttpRule(): HttpRule { + return { + selector: "", + get: undefined, + put: undefined, + post: undefined, + delete: undefined, + patch: undefined, + custom: undefined, + body: "", + responseBody: "", + additionalBindings: [], + }; +} + +export const HttpRule = { + encode(message: HttpRule, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.selector !== "") { + writer.uint32(10).string(message.selector); + } + if (message.get !== undefined) { + writer.uint32(18).string(message.get); + } + if (message.put !== undefined) { + writer.uint32(26).string(message.put); + } + if (message.post !== undefined) { + writer.uint32(34).string(message.post); + } + if (message.delete !== undefined) { + writer.uint32(42).string(message.delete); + } + if (message.patch !== undefined) { + writer.uint32(50).string(message.patch); + } + if (message.custom !== undefined) { + CustomHttpPattern.encode(message.custom, writer.uint32(66).fork()).ldelim(); + } + if (message.body !== "") { + writer.uint32(58).string(message.body); + } + if (message.responseBody !== "") { + writer.uint32(98).string(message.responseBody); + } + for (const v of message.additionalBindings) { + HttpRule.encode(v!, writer.uint32(90).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): HttpRule { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseHttpRule(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.selector = reader.string(); + break; + case 2: + message.get = reader.string(); + break; + case 3: + message.put = reader.string(); + break; + case 4: + message.post = reader.string(); + break; + case 5: + message.delete = reader.string(); + break; + case 6: + message.patch = reader.string(); + break; + case 8: + message.custom = CustomHttpPattern.decode(reader, reader.uint32()); + break; + case 7: + message.body = reader.string(); + break; + case 12: + message.responseBody = reader.string(); + break; + case 11: + message.additionalBindings.push(HttpRule.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): HttpRule { + return { + selector: isSet(object.selector) ? String(object.selector) : "", + get: isSet(object.get) ? String(object.get) : undefined, + put: isSet(object.put) ? String(object.put) : undefined, + post: isSet(object.post) ? String(object.post) : undefined, + delete: isSet(object.delete) ? String(object.delete) : undefined, + patch: isSet(object.patch) ? String(object.patch) : undefined, + custom: isSet(object.custom) ? CustomHttpPattern.fromJSON(object.custom) : undefined, + body: isSet(object.body) ? String(object.body) : "", + responseBody: isSet(object.responseBody) ? String(object.responseBody) : "", + additionalBindings: Array.isArray(object?.additionalBindings) + ? 
object.additionalBindings.map((e: any) => HttpRule.fromJSON(e)) + : [], + }; + }, + + toJSON(message: HttpRule): unknown { + const obj: any = {}; + message.selector !== undefined && (obj.selector = message.selector); + message.get !== undefined && (obj.get = message.get); + message.put !== undefined && (obj.put = message.put); + message.post !== undefined && (obj.post = message.post); + message.delete !== undefined && (obj.delete = message.delete); + message.patch !== undefined && (obj.patch = message.patch); + message.custom !== undefined + && (obj.custom = message.custom ? CustomHttpPattern.toJSON(message.custom) : undefined); + message.body !== undefined && (obj.body = message.body); + message.responseBody !== undefined && (obj.responseBody = message.responseBody); + if (message.additionalBindings) { + obj.additionalBindings = message.additionalBindings.map((e) => e ? HttpRule.toJSON(e) : undefined); + } else { + obj.additionalBindings = []; + } + return obj; + }, + + fromPartial, I>>(object: I): HttpRule { + const message = createBaseHttpRule(); + message.selector = object.selector ?? ""; + message.get = object.get ?? undefined; + message.put = object.put ?? undefined; + message.post = object.post ?? undefined; + message.delete = object.delete ?? undefined; + message.patch = object.patch ?? undefined; + message.custom = (object.custom !== undefined && object.custom !== null) + ? CustomHttpPattern.fromPartial(object.custom) + : undefined; + message.body = object.body ?? ""; + message.responseBody = object.responseBody ?? 
""; + message.additionalBindings = object.additionalBindings?.map((e) => HttpRule.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseCustomHttpPattern(): CustomHttpPattern { + return { kind: "", path: "" }; +} + +export const CustomHttpPattern = { + encode(message: CustomHttpPattern, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.kind !== "") { + writer.uint32(10).string(message.kind); + } + if (message.path !== "") { + writer.uint32(18).string(message.path); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CustomHttpPattern { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCustomHttpPattern(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.kind = reader.string(); + break; + case 2: + message.path = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): CustomHttpPattern { + return { kind: isSet(object.kind) ? String(object.kind) : "", path: isSet(object.path) ? String(object.path) : "" }; + }, + + toJSON(message: CustomHttpPattern): unknown { + const obj: any = {}; + message.kind !== undefined && (obj.kind = message.kind); + message.path !== undefined && (obj.path = message.path); + return obj; + }, + + fromPartial, I>>(object: I): CustomHttpPattern { + const message = createBaseCustomHttpPattern(); + message.kind = object.kind ?? ""; + message.path = object.path ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? 
keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/google/protobuf/any.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/google/protobuf/any.ts new file mode 100644 index 000000000..c4ebf38be --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/google/protobuf/any.ts @@ -0,0 +1,240 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * `Any` contains an arbitrary serialized protocol buffer message along with a + * URL that describes the type of the serialized message. + * + * Protobuf library provides support to pack/unpack Any values in the form + * of utility functions or additional generated methods of the Any type. + * + * Example 1: Pack and unpack a message in C++. + * + * Foo foo = ...; + * Any any; + * any.PackFrom(foo); + * ... + * if (any.UnpackTo(&foo)) { + * ... + * } + * + * Example 2: Pack and unpack a message in Java. + * + * Foo foo = ...; + * Any any = Any.pack(foo); + * ... + * if (any.is(Foo.class)) { + * foo = any.unpack(Foo.class); + * } + * + * Example 3: Pack and unpack a message in Python. + * + * foo = Foo(...) + * any = Any() + * any.Pack(foo) + * ... + * if any.Is(Foo.DESCRIPTOR): + * any.Unpack(foo) + * ... + * + * Example 4: Pack and unpack a message in Go + * + * foo := &pb.Foo{...} + * any, err := anypb.New(foo) + * if err != nil { + * ... + * } + * ... + * foo := &pb.Foo{} + * if err := any.UnmarshalTo(foo); err != nil { + * ... 
+ * } + * + * The pack methods provided by protobuf library will by default use + * 'type.googleapis.com/full.type.name' as the type URL and the unpack + * methods only use the fully qualified type name after the last '/' + * in the type URL, for example "foo.bar.com/x/y.z" will yield type + * name "y.z". + * + * JSON + * ==== + * The JSON representation of an `Any` value uses the regular + * representation of the deserialized, embedded message, with an + * additional field `@type` which contains the type URL. Example: + * + * package google.profile; + * message Person { + * string first_name = 1; + * string last_name = 2; + * } + * + * { + * "@type": "type.googleapis.com/google.profile.Person", + * "firstName": , + * "lastName": + * } + * + * If the embedded message type is well-known and has a custom JSON + * representation, that representation will be embedded adding a field + * `value` which holds the custom JSON in addition to the `@type` + * field. Example (for message [google.protobuf.Duration][]): + * + * { + * "@type": "type.googleapis.com/google.protobuf.Duration", + * "value": "1.212s" + * } + */ +export interface Any { + /** + * A URL/resource name that uniquely identifies the type of the serialized + * protocol buffer message. This string must contain at least + * one "/" character. The last segment of the URL's path must represent + * the fully qualified name of the type (as in + * `path/google.protobuf.Duration`). The name should be in a canonical form + * (e.g., leading "." is not accepted). + * + * In practice, teams usually precompile into the binary all types that they + * expect it to use in the context of Any. However, for URLs which use the + * scheme `http`, `https`, or no scheme, one can optionally set up a type + * server that maps type URLs to message definitions as follows: + * + * * If no scheme is provided, `https` is assumed. 
+ * * An HTTP GET on the URL must yield a [google.protobuf.Type][] + * value in binary format, or produce an error. + * * Applications are allowed to cache lookup results based on the + * URL, or have them precompiled into a binary to avoid any + * lookup. Therefore, binary compatibility needs to be preserved + * on changes to types. (Use versioned type names to manage + * breaking changes.) + * + * Note: this functionality is not currently available in the official + * protobuf release, and it is not used for type URLs beginning with + * type.googleapis.com. + * + * Schemes other than `http`, `https` (or the empty scheme) might be + * used with implementation specific semantics. + */ + typeUrl: string; + /** Must be a valid serialized protocol buffer of the above specified type. */ + value: Uint8Array; +} + +function createBaseAny(): Any { + return { typeUrl: "", value: new Uint8Array() }; +} + +export const Any = { + encode(message: Any, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.typeUrl !== "") { + writer.uint32(10).string(message.typeUrl); + } + if (message.value.length !== 0) { + writer.uint32(18).bytes(message.value); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Any { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAny(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.typeUrl = reader.string(); + break; + case 2: + message.value = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Any { + return { + typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : "", + value: isSet(object.value) ? 
bytesFromBase64(object.value) : new Uint8Array(), + }; + }, + + toJSON(message: Any): unknown { + const obj: any = {}; + message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl); + message.value !== undefined + && (obj.value = base64FromBytes(message.value !== undefined ? message.value : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): Any { + const message = createBaseAny(); + message.typeUrl = object.typeUrl ?? ""; + message.value = object.value ?? new Uint8Array(); + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/google/protobuf/descriptor.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/google/protobuf/descriptor.ts new file mode 100644 index 000000000..8fb7bb24c --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/google/protobuf/descriptor.ts @@ -0,0 +1,3753 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSet { + file: FileDescriptorProto[]; +} + +/** Describes a complete .proto file. */ +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name: string; + /** e.g. "foo", "foo.bar", etc. */ + package: string; + /** Names of files imported by this file. */ + dependency: string[]; + /** Indexes of the public imported files in the dependency list above. */ + publicDependency: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weakDependency: number[]; + /** All top-level definitions in this file. */ + messageType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + service: ServiceDescriptorProto[]; + extension: FieldDescriptorProto[]; + options: + | FileOptions + | undefined; + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. 
+ */ + sourceCodeInfo: + | SourceCodeInfo + | undefined; + /** + * The syntax of the proto file. + * The supported values are "proto2" and "proto3". + */ + syntax: string; +} + +/** Describes a message type. */ +export interface DescriptorProto { + name: string; + field: FieldDescriptorProto[]; + extension: FieldDescriptorProto[]; + nestedType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + extensionRange: DescriptorProto_ExtensionRange[]; + oneofDecl: OneofDescriptorProto[]; + options: MessageOptions | undefined; + reservedRange: DescriptorProto_ReservedRange[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reservedName: string[]; +} + +export interface DescriptorProto_ExtensionRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; + options: ExtensionRangeOptions | undefined; +} + +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; +} + +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Describes a field within a message. */ +export interface FieldDescriptorProto { + name: string; + number: number; + label: FieldDescriptorProto_Label; + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type: FieldDescriptorProto_Type; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. 
first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). + */ + typeName: string; + /** + * For extensions, this is the name of the type being extended. It is + * resolved in the same manner as type_name. + */ + extendee: string; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + * TODO(kenton): Base-64 encode? + */ + defaultValue: string; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneofIndex: number; + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + jsonName: string; + options: + | FieldOptions + | undefined; + /** + * If true, this is a proto3 "optional". When a proto3 field is optional, it + * tracks presence regardless of field type. + * + * When proto3_optional is true, this field must be belong to a oneof to + * signal to old proto3 clients that presence is tracked for this field. This + * oneof is known as a "synthetic" oneof, and this field must be its sole + * member (each proto3 optional field gets its own synthetic oneof). Synthetic + * oneofs exist in the descriptor only, and do not generate any API. Synthetic + * oneofs must be ordered after all "real" oneofs. + * + * For message fields, proto3_optional doesn't create any semantic change, + * since non-repeated message fields always track presence. However it still + * indicates the semantic detail of whether the user wrote "optional" or not. + * This can be useful for round-tripping the .proto file. 
For consistency we + * give message fields a synthetic oneof also, even though it is not required + * to track presence. This is especially important because the parser can't + * tell if a field is a message or an enum, so it must always create a + * synthetic oneof. + * + * Proto2 optional fields do not set this flag, because they already indicate + * optional with `LABEL_OPTIONAL`. + */ + proto3Optional: boolean; +} + +export enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + TYPE_GROUP = 10, + /** TYPE_MESSAGE - Length-delimited aggregate. */ + TYPE_MESSAGE = 11, + /** TYPE_BYTES - New in version 2. */ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + /** TYPE_SINT64 - Uses ZigZag encoding. 
*/ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + return FieldDescriptorProto_Type.TYPE_SINT64; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Type.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case 
FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + case FieldDescriptorProto_Type.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Label.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string { + switch (object) { + case 
FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + case FieldDescriptorProto_Label.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** Describes a oneof. */ +export interface OneofDescriptorProto { + name: string; + options: OneofOptions | undefined; +} + +/** Describes an enum type. */ +export interface EnumDescriptorProto { + name: string; + value: EnumValueDescriptorProto[]; + options: + | EnumOptions + | undefined; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reservedRange: EnumDescriptorProto_EnumReservedRange[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reservedName: string[]; +} + +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start: number; + /** Inclusive. */ + end: number; +} + +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProto { + name: string; + number: number; + options: EnumValueOptions | undefined; +} + +/** Describes a service. */ +export interface ServiceDescriptorProto { + name: string; + method: MethodDescriptorProto[]; + options: ServiceOptions | undefined; +} + +/** Describes a method of a service. */ +export interface MethodDescriptorProto { + name: string; + /** + * Input and output type names. 
These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + inputType: string; + outputType: string; + options: + | MethodOptions + | undefined; + /** Identifies if client streams multiple client messages */ + clientStreaming: boolean; + /** Identifies if server streams multiple server messages */ + serverStreaming: boolean; +} + +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage: string; + /** + * Controls the name of the wrapper Java class generated for the .proto file. + * That class will always contain the .proto file's getDescriptor() method as + * well as any top-level extensions defined in the .proto file. + * If java_multiple_files is disabled, then all the other classes from the + * .proto file will be nested inside the single wrapper outer class. + */ + javaOuterClassname: string; + /** + * If enabled, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the wrapper class + * named by java_outer_classname. However, the wrapper class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + javaMultipleFiles: boolean; + /** + * This option does nothing. + * + * @deprecated + */ + javaGenerateEqualsAndHash: boolean; + /** + * If set true, then the Java2 code generator will generate code that + * throws an exception whenever an attempt is made to assign a non-UTF-8 + * byte sequence to a string field. + * Message reflection will do the same. + * However, an extension field still accepts non-UTF-8 byte sequences. 
+ * This option has no effect on when used with the lite runtime. + */ + javaStringCheckUtf8: boolean; + optimizeFor: FileOptions_OptimizeMode; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + goPackage: string; + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + ccGenericServices: boolean; + javaGenericServices: boolean; + pyGenericServices: boolean; + phpGenericServices: boolean; + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated: boolean; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + ccEnableArenas: boolean; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. + */ + objcClassPrefix: string; + /** Namespace for generated classes; defaults to the package. 
*/ + csharpNamespace: string; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swiftPrefix: string; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + phpClassPrefix: string; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. When this option is empty, the package name will be used for + * determining the namespace. + */ + phpNamespace: string; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + phpMetadataNamespace: string; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + rubyPackage: string; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Generated classes can be optimized for speed or code size. */ +export enum FileOptions_OptimizeMode { + /** SPEED - Generate complete code for parsing, serialization, */ + SPEED = 1, + /** CODE_SIZE - etc. */ + CODE_SIZE = 2, + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. 
*/ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1, +} + +export function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + case -1: + case "UNRECOGNIZED": + default: + return FileOptions_OptimizeMode.UNRECOGNIZED; + } +} + +export function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + case FileOptions_OptimizeMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + messageSetWireFormat: boolean; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". 
+ */ + noStandardDescriptorAccessor: boolean; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated: boolean; + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + mapEntry: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface FieldOptions { + /** + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is not yet implemented in the open source + * release -- sorry, we'll try to include it in a future version! + */ + ctype: FieldOptions_CType; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. 
+ */ + packed: boolean; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. + * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype: FieldOptions_JSType; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * Note that implementations may choose not to check required fields within + * a lazy sub-message. That is, calling IsInitialized() on the outer message + * may return true even if the inner message has missing required fields. 
+ * This is necessary because otherwise the inner message would have to be + * parsed in order to perform the check, defeating the purpose of lazy + * parsing. An implementation which chooses not to check required fields + * must be consistent about it. That is, for any particular sub-message, the + * implementation must either *always* check its required fields, or *never* + * check its required fields, regardless of whether or not the message has + * been parsed. + */ + lazy: boolean; + /** + * Is this field deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated: boolean; + /** For Google-internal migration only. Do not use. */ + weak: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export enum FieldOptions_CType { + /** STRING - Default mode. */ + STRING = 0, + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_CType.UNRECOGNIZED; + } +} + +export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + case FieldOptions_CType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. */ + JS_NORMAL = 0, + /** JS_STRING - Use JavaScript strings. 
*/ + JS_STRING = 1, + /** JS_NUMBER - Use JavaScript numbers. */ + JS_NUMBER = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_JSType.UNRECOGNIZED; + } +} + +export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + case FieldOptions_JSType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface OneofOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias: boolean; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumValueOptions { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. 
*/ + uninterpretedOption: UninterpretedOption[]; +} + +export interface ServiceOptions { + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated: boolean; + idempotencyLevel: MethodOptions_IdempotencyLevel; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
+ */ +export enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1, +} + +export function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + case -1: + case "UNRECOGNIZED": + default: + return MethodOptions_IdempotencyLevel.UNRECOGNIZED; + } +} + +export function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + case MethodOptions_IdempotencyLevel.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. 
+ */ + identifierValue: string; + positiveIntValue: number; + negativeIntValue: number; + doubleValue: number; + stringValue: Uint8Array; + aggregateValue: string; +} + +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePart { + namePart: string; + isExtension: boolean; +} + +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. 
This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[]; +} + +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition. For + * example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). 
+ */ + path: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. + * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to qux. + * // + * // Another line attached to qux. + * optional double qux = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to qux or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. * / + * /* Block comment attached to + * * grault. 
* / + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leadingComments: string; + trailingComments: string; + leadingDetachedComments: string[]; +} + +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. + */ + annotation: GeneratedCodeInfo_Annotation[]; +} + +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[]; + /** Identifies the filesystem path to the original source .proto. */ + sourceFile: string; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin: number; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified offset. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end: number; +} + +function createBaseFileDescriptorSet(): FileDescriptorSet { + return { file: [] }; +} + +export const FileDescriptorSet = { + encode(message: FileDescriptorSet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.file) { + FileDescriptorProto.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorSet { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.file.push(FileDescriptorProto.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorSet { + return { file: Array.isArray(object?.file) ? object.file.map((e: any) => FileDescriptorProto.fromJSON(e)) : [] }; + }, + + toJSON(message: FileDescriptorSet): unknown { + const obj: any = {}; + if (message.file) { + obj.file = message.file.map((e) => e ? FileDescriptorProto.toJSON(e) : undefined); + } else { + obj.file = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorSet { + const message = createBaseFileDescriptorSet(); + message.file = object.file?.map((e) => FileDescriptorProto.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFileDescriptorProto(): FileDescriptorProto { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + }; +} + +export const FileDescriptorProto = { + encode(message: FileDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.package !== "") { + writer.uint32(18).string(message.package); + } + for (const v of message.dependency) { + writer.uint32(26).string(v!); + } + writer.uint32(82).fork(); + for (const v of message.publicDependency) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(90).fork(); + for (const v of message.weakDependency) { + writer.int32(v); + } + writer.ldelim(); + for (const v of message.messageType) { + DescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of 
message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const v of message.service) { + ServiceDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(58).fork()).ldelim(); + } + if (message.options !== undefined) { + FileOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.sourceCodeInfo !== undefined) { + SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(74).fork()).ldelim(); + } + if (message.syntax !== "") { + writer.uint32(98).string(message.syntax); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFileDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.package = reader.string(); + break; + case 3: + message.dependency.push(reader.string()); + break; + case 10: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.publicDependency.push(reader.int32()); + } + } else { + message.publicDependency.push(reader.int32()); + } + break; + case 11: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.weakDependency.push(reader.int32()); + } + } else { + message.weakDependency.push(reader.int32()); + } + break; + case 4: + message.messageType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.service.push(ServiceDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + 
message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 8: + message.options = FileOptions.decode(reader, reader.uint32()); + break; + case 9: + message.sourceCodeInfo = SourceCodeInfo.decode(reader, reader.uint32()); + break; + case 12: + message.syntax = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + package: isSet(object.package) ? String(object.package) : "", + dependency: Array.isArray(object?.dependency) ? object.dependency.map((e: any) => String(e)) : [], + publicDependency: Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e: any) => Number(e)) + : [], + weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e: any) => Number(e)) : [], + messageType: Array.isArray(object?.messageType) + ? object.messageType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + service: Array.isArray(object?.service) ? object.service.map((e: any) => ServiceDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) ? SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, + syntax: isSet(object.syntax) ? 
String(object.syntax) : "", + }; + }, + + toJSON(message: FileDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.package !== undefined && (obj.package = message.package); + if (message.dependency) { + obj.dependency = message.dependency.map((e) => e); + } else { + obj.dependency = []; + } + if (message.publicDependency) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } else { + obj.publicDependency = []; + } + if (message.weakDependency) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } else { + obj.weakDependency = []; + } + if (message.messageType) { + obj.messageType = message.messageType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.messageType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.service) { + obj.service = message.service.map((e) => e ? ServiceDescriptorProto.toJSON(e) : undefined); + } else { + obj.service = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + message.options !== undefined && (obj.options = message.options ? FileOptions.toJSON(message.options) : undefined); + message.sourceCodeInfo !== undefined + && (obj.sourceCodeInfo = message.sourceCodeInfo ? SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined); + message.syntax !== undefined && (obj.syntax = message.syntax); + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorProto { + const message = createBaseFileDescriptorProto(); + message.name = object.name ?? ""; + message.package = object.package ?? 
""; + message.dependency = object.dependency?.map((e) => e) || []; + message.publicDependency = object.publicDependency?.map((e) => e) || []; + message.weakDependency = object.weakDependency?.map((e) => e) || []; + message.messageType = object.messageType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.service = object.service?.map((e) => ServiceDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? FileOptions.fromPartial(object.options) + : undefined; + message.sourceCodeInfo = (object.sourceCodeInfo !== undefined && object.sourceCodeInfo !== null) + ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) + : undefined; + message.syntax = object.syntax ?? ""; + return message; + }, +}; + +function createBaseDescriptorProto(): DescriptorProto { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + }; +} + +export const DescriptorProto = { + encode(message: DescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.field) { + FieldDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.nestedType) { + DescriptorProto.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.extensionRange) { + DescriptorProto_ExtensionRange.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const 
v of message.oneofDecl) { + OneofDescriptorProto.encode(v!, writer.uint32(66).fork()).ldelim(); + } + if (message.options !== undefined) { + MessageOptions.encode(message.options, writer.uint32(58).fork()).ldelim(); + } + for (const v of message.reservedRange) { + DescriptorProto_ReservedRange.encode(v!, writer.uint32(74).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(82).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.field.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.nestedType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 4: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.extensionRange.push(DescriptorProto_ExtensionRange.decode(reader, reader.uint32())); + break; + case 8: + message.oneofDecl.push(OneofDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + message.options = MessageOptions.decode(reader, reader.uint32()); + break; + case 9: + message.reservedRange.push(DescriptorProto_ReservedRange.decode(reader, reader.uint32())); + break; + case 10: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + field: Array.isArray(object?.field) ? 
object.field.map((e: any) => FieldDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: Array.isArray(object?.nestedType) + ? object.nestedType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + extensionRange: Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e: any) => DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e: any) => OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? MessageOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: DescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.field) { + obj.field = message.field.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.field = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + if (message.nestedType) { + obj.nestedType = message.nestedType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.nestedType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.extensionRange) { + obj.extensionRange = message.extensionRange.map((e) => e ? 
DescriptorProto_ExtensionRange.toJSON(e) : undefined); + } else { + obj.extensionRange = []; + } + if (message.oneofDecl) { + obj.oneofDecl = message.oneofDecl.map((e) => e ? OneofDescriptorProto.toJSON(e) : undefined); + } else { + obj.oneofDecl = []; + } + message.options !== undefined + && (obj.options = message.options ? MessageOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => e ? DescriptorProto_ReservedRange.toJSON(e) : undefined); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): DescriptorProto { + const message = createBaseDescriptorProto(); + message.name = object.name ?? ""; + message.field = object.field?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.nestedType = object.nestedType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.extensionRange = object.extensionRange?.map((e) => DescriptorProto_ExtensionRange.fromPartial(e)) || []; + message.oneofDecl = object.oneofDecl?.map((e) => OneofDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
MessageOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => DescriptorProto_ReservedRange.fromPartial(e)) || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseDescriptorProto_ExtensionRange(): DescriptorProto_ExtensionRange { + return { start: 0, end: 0, options: undefined }; +} + +export const DescriptorProto_ExtensionRange = { + encode(message: DescriptorProto_ExtensionRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + if (message.options !== undefined) { + ExtensionRangeOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ExtensionRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ExtensionRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + case 3: + message.options = ExtensionRangeOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ExtensionRange { + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + options: isSet(object.options) ? 
ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: DescriptorProto_ExtensionRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + message.options !== undefined + && (obj.options = message.options ? ExtensionRangeOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ExtensionRange { + const message = createBaseDescriptorProto_ExtensionRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? ExtensionRangeOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseDescriptorProto_ReservedRange(): DescriptorProto_ReservedRange { + return { start: 0, end: 0 }; +} + +export const DescriptorProto_ReservedRange = { + encode(message: DescriptorProto_ReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? 
Number(object.end) : 0 }; + }, + + toJSON(message: DescriptorProto_ReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ReservedRange { + const message = createBaseDescriptorProto_ReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + return message; + }, +}; + +function createBaseExtensionRangeOptions(): ExtensionRangeOptions { + return { uninterpretedOption: [] }; +} + +export const ExtensionRangeOptions = { + encode(message: ExtensionRangeOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ExtensionRangeOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ExtensionRangeOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} + +export const FieldDescriptorProto = { + encode(message: FieldDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(24).int32(message.number); + } + if (message.label !== 1) { + writer.uint32(32).int32(message.label); + } + if (message.type !== 1) { + writer.uint32(40).int32(message.type); + } + if (message.typeName !== "") { + writer.uint32(50).string(message.typeName); + } + if (message.extendee !== "") { + writer.uint32(18).string(message.extendee); + } + if (message.defaultValue !== "") { + writer.uint32(58).string(message.defaultValue); + } + if (message.oneofIndex !== 0) { + writer.uint32(72).int32(message.oneofIndex); + } + if (message.jsonName !== "") { + writer.uint32(82).string(message.jsonName); + } + if (message.options !== undefined) { + FieldOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.proto3Optional === true) { + writer.uint32(136).bool(message.proto3Optional); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 3: + message.number = reader.int32(); + break; + case 4: + message.label = reader.int32() as any; + break; + case 5: + message.type = reader.int32() as any; + break; + case 6: + message.typeName = reader.string(); + break; + case 2: + message.extendee = reader.string(); + break; + case 7: + message.defaultValue = reader.string(); + break; + case 9: + message.oneofIndex = reader.int32(); + break; + case 10: + message.jsonName = reader.string(); + break; + case 8: + message.options = FieldOptions.decode(reader, reader.uint32()); + break; + case 17: + message.proto3Optional = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) ? String(object.typeName) : "", + extendee: isSet(object.extendee) ? String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "", + oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? String(object.jsonName) : "", + options: isSet(object.options) ? FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) ? 
Boolean(object.proto3Optional) : false, + }; + }, + + toJSON(message: FieldDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label)); + message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type)); + message.typeName !== undefined && (obj.typeName = message.typeName); + message.extendee !== undefined && (obj.extendee = message.extendee); + message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue); + message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex)); + message.jsonName !== undefined && (obj.jsonName = message.jsonName); + message.options !== undefined && (obj.options = message.options ? FieldOptions.toJSON(message.options) : undefined); + message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional); + return obj; + }, + + fromPartial, I>>(object: I): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.label = object.label ?? 1; + message.type = object.type ?? 1; + message.typeName = object.typeName ?? ""; + message.extendee = object.extendee ?? ""; + message.defaultValue = object.defaultValue ?? ""; + message.oneofIndex = object.oneofIndex ?? 0; + message.jsonName = object.jsonName ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? FieldOptions.fromPartial(object.options) + : undefined; + message.proto3Optional = object.proto3Optional ?? 
false; + return message; + }, +}; + +function createBaseOneofDescriptorProto(): OneofDescriptorProto { + return { name: "", options: undefined }; +} + +export const OneofDescriptorProto = { + encode(message: OneofDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.options !== undefined) { + OneofOptions.encode(message.options, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.options = OneofOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + options: isSet(object.options) ? OneofOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: OneofDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.options !== undefined && (obj.options = message.options ? OneofOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto(); + message.name = object.name ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? 
OneofOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseEnumDescriptorProto(): EnumDescriptorProto { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] }; +} + +export const EnumDescriptorProto = { + encode(message: EnumDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.value) { + EnumValueDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + EnumOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.reservedRange) { + EnumDescriptorProto_EnumReservedRange.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(42).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.value.push(EnumValueDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = EnumOptions.decode(reader, reader.uint32()); + break; + case 4: + message.reservedRange.push(EnumDescriptorProto_EnumReservedRange.decode(reader, reader.uint32())); + break; + case 5: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + value: Array.isArray(object?.value) ? 
object.value.map((e: any) => EnumValueDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? EnumOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: EnumDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.value) { + obj.value = message.value.map((e) => e ? EnumValueDescriptorProto.toJSON(e) : undefined); + } else { + obj.value = []; + } + message.options !== undefined && (obj.options = message.options ? EnumOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => + e ? EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined + ); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto(); + message.name = object.name ?? ""; + message.value = object.value?.map((e) => EnumValueDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
EnumOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => EnumDescriptorProto_EnumReservedRange.fromPartial(e)) + || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseEnumDescriptorProto_EnumReservedRange(): EnumDescriptorProto_EnumReservedRange { + return { start: 0, end: 0 }; +} + +export const EnumDescriptorProto_EnumReservedRange = { + encode(message: EnumDescriptorProto_EnumReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto_EnumReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto_EnumReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + + toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +function createBaseEnumValueDescriptorProto(): EnumValueDescriptorProto { + return { name: "", number: 0, options: undefined }; +} + +export const EnumValueDescriptorProto = { + encode(message: EnumValueDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(16).int32(message.number); + } + if (message.options !== undefined) { + EnumValueOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.number = reader.int32(); + break; + case 3: + message.options = EnumValueOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + options: isSet(object.options) ? EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: EnumValueDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.options !== undefined + && (obj.options = message.options ? 
EnumValueOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? EnumValueOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseServiceDescriptorProto(): ServiceDescriptorProto { + return { name: "", method: [], options: undefined }; +} + +export const ServiceDescriptorProto = { + encode(message: ServiceDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.method) { + MethodDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + ServiceOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.method.push(MethodDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = ServiceOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + method: Array.isArray(object?.method) ? object.method.map((e: any) => MethodDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? 
ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: ServiceDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.method) { + obj.method = message.method.map((e) => e ? MethodDescriptorProto.toJSON(e) : undefined); + } else { + obj.method = []; + } + message.options !== undefined + && (obj.options = message.options ? ServiceOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto(); + message.name = object.name ?? ""; + message.method = object.method?.map((e) => MethodDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? ServiceOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseMethodDescriptorProto(): MethodDescriptorProto { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} + +export const MethodDescriptorProto = { + encode(message: MethodDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.inputType !== "") { + writer.uint32(18).string(message.inputType); + } + if (message.outputType !== "") { + writer.uint32(26).string(message.outputType); + } + if (message.options !== undefined) { + MethodOptions.encode(message.options, writer.uint32(34).fork()).ldelim(); + } + if (message.clientStreaming === true) { + writer.uint32(40).bool(message.clientStreaming); + } + if (message.serverStreaming === true) { + writer.uint32(48).bool(message.serverStreaming); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodDescriptorProto { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.inputType = reader.string(); + break; + case 3: + message.outputType = reader.string(); + break; + case 4: + message.options = MethodOptions.decode(reader, reader.uint32()); + break; + case 5: + message.clientStreaming = reader.bool(); + break; + case 6: + message.serverStreaming = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + inputType: isSet(object.inputType) ? String(object.inputType) : "", + outputType: isSet(object.outputType) ? String(object.outputType) : "", + options: isSet(object.options) ? MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false, + serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false, + }; + }, + + toJSON(message: MethodDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.inputType !== undefined && (obj.inputType = message.inputType); + message.outputType !== undefined && (obj.outputType = message.outputType); + message.options !== undefined + && (obj.options = message.options ? MethodOptions.toJSON(message.options) : undefined); + message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming); + message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming); + return obj; + }, + + fromPartial, I>>(object: I): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto(); + message.name = object.name ?? 
""; + message.inputType = object.inputType ?? ""; + message.outputType = object.outputType ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? MethodOptions.fromPartial(object.options) + : undefined; + message.clientStreaming = object.clientStreaming ?? false; + message.serverStreaming = object.serverStreaming ?? false; + return message; + }, +}; + +function createBaseFileOptions(): FileOptions { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + uninterpretedOption: [], + }; +} + +export const FileOptions = { + encode(message: FileOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.javaPackage !== "") { + writer.uint32(10).string(message.javaPackage); + } + if (message.javaOuterClassname !== "") { + writer.uint32(66).string(message.javaOuterClassname); + } + if (message.javaMultipleFiles === true) { + writer.uint32(80).bool(message.javaMultipleFiles); + } + if (message.javaGenerateEqualsAndHash === true) { + writer.uint32(160).bool(message.javaGenerateEqualsAndHash); + } + if (message.javaStringCheckUtf8 === true) { + writer.uint32(216).bool(message.javaStringCheckUtf8); + } + if (message.optimizeFor !== 1) { + writer.uint32(72).int32(message.optimizeFor); + } + if (message.goPackage !== "") { + writer.uint32(90).string(message.goPackage); + } + if (message.ccGenericServices === true) { + writer.uint32(128).bool(message.ccGenericServices); + } + if (message.javaGenericServices === true) { + writer.uint32(136).bool(message.javaGenericServices); + } + if 
(message.pyGenericServices === true) { + writer.uint32(144).bool(message.pyGenericServices); + } + if (message.phpGenericServices === true) { + writer.uint32(336).bool(message.phpGenericServices); + } + if (message.deprecated === true) { + writer.uint32(184).bool(message.deprecated); + } + if (message.ccEnableArenas === true) { + writer.uint32(248).bool(message.ccEnableArenas); + } + if (message.objcClassPrefix !== "") { + writer.uint32(290).string(message.objcClassPrefix); + } + if (message.csharpNamespace !== "") { + writer.uint32(298).string(message.csharpNamespace); + } + if (message.swiftPrefix !== "") { + writer.uint32(314).string(message.swiftPrefix); + } + if (message.phpClassPrefix !== "") { + writer.uint32(322).string(message.phpClassPrefix); + } + if (message.phpNamespace !== "") { + writer.uint32(330).string(message.phpNamespace); + } + if (message.phpMetadataNamespace !== "") { + writer.uint32(354).string(message.phpMetadataNamespace); + } + if (message.rubyPackage !== "") { + writer.uint32(362).string(message.rubyPackage); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.javaPackage = reader.string(); + break; + case 8: + message.javaOuterClassname = reader.string(); + break; + case 10: + message.javaMultipleFiles = reader.bool(); + break; + case 20: + message.javaGenerateEqualsAndHash = reader.bool(); + break; + case 27: + message.javaStringCheckUtf8 = reader.bool(); + break; + case 9: + message.optimizeFor = reader.int32() as any; + break; + case 11: + message.goPackage = reader.string(); + break; + case 16: + message.ccGenericServices = reader.bool(); + break; + case 17: + message.javaGenericServices = reader.bool(); + break; + case 18: + message.pyGenericServices = reader.bool(); + break; + case 42: + message.phpGenericServices = reader.bool(); + break; + case 23: + message.deprecated = reader.bool(); + break; + case 31: + message.ccEnableArenas = reader.bool(); + break; + case 36: + message.objcClassPrefix = reader.string(); + break; + case 37: + message.csharpNamespace = reader.string(); + break; + case 39: + message.swiftPrefix = reader.string(); + break; + case 40: + message.phpClassPrefix = reader.string(); + break; + case 41: + message.phpNamespace = reader.string(); + break; + case 44: + message.phpMetadataNamespace = reader.string(); + break; + case 45: + message.rubyPackage = reader.string(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileOptions { + return { + javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "", + javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) ? 
Boolean(object.javaMultipleFiles) : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false, + optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, + goPackage: isSet(object.goPackage) ? String(object.goPackage) : "", + ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false, + javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false, + pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false, + phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false, + objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "", + csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "", + swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "", + phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "", + phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "", + rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "", + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FileOptions): unknown { + const obj: any = {}; + message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage); + message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname); + message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles); + message.javaGenerateEqualsAndHash !== undefined + && (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash); + message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8); + message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor)); + message.goPackage !== undefined && (obj.goPackage = message.goPackage); + message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices); + message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices); + message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices); + message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas); + message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix); + message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace); + message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix); + message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix); + message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace); + message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace); + message.rubyPackage !== undefined 
&& (obj.rubyPackage = message.rubyPackage); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileOptions { + const message = createBaseFileOptions(); + message.javaPackage = object.javaPackage ?? ""; + message.javaOuterClassname = object.javaOuterClassname ?? ""; + message.javaMultipleFiles = object.javaMultipleFiles ?? false; + message.javaGenerateEqualsAndHash = object.javaGenerateEqualsAndHash ?? false; + message.javaStringCheckUtf8 = object.javaStringCheckUtf8 ?? false; + message.optimizeFor = object.optimizeFor ?? 1; + message.goPackage = object.goPackage ?? ""; + message.ccGenericServices = object.ccGenericServices ?? false; + message.javaGenericServices = object.javaGenericServices ?? false; + message.pyGenericServices = object.pyGenericServices ?? false; + message.phpGenericServices = object.phpGenericServices ?? false; + message.deprecated = object.deprecated ?? false; + message.ccEnableArenas = object.ccEnableArenas ?? false; + message.objcClassPrefix = object.objcClassPrefix ?? ""; + message.csharpNamespace = object.csharpNamespace ?? ""; + message.swiftPrefix = object.swiftPrefix ?? ""; + message.phpClassPrefix = object.phpClassPrefix ?? ""; + message.phpNamespace = object.phpNamespace ?? ""; + message.phpMetadataNamespace = object.phpMetadataNamespace ?? ""; + message.rubyPackage = object.rubyPackage ?? 
""; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMessageOptions(): MessageOptions { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + uninterpretedOption: [], + }; +} + +export const MessageOptions = { + encode(message: MessageOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.messageSetWireFormat === true) { + writer.uint32(8).bool(message.messageSetWireFormat); + } + if (message.noStandardDescriptorAccessor === true) { + writer.uint32(16).bool(message.noStandardDescriptorAccessor); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.mapEntry === true) { + writer.uint32(56).bool(message.mapEntry); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MessageOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMessageOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.messageSetWireFormat = reader.bool(); + break; + case 2: + message.noStandardDescriptorAccessor = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 7: + message.mapEntry = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MessageOptions { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) ? 
Boolean(object.messageSetWireFormat) : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MessageOptions): unknown { + const obj: any = {}; + message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat); + message.noStandardDescriptorAccessor !== undefined + && (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MessageOptions { + const message = createBaseMessageOptions(); + message.messageSetWireFormat = object.messageSetWireFormat ?? false; + message.noStandardDescriptorAccessor = object.noStandardDescriptorAccessor ?? false; + message.deprecated = object.deprecated ?? false; + message.mapEntry = object.mapEntry ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldOptions(): FieldOptions { + return { ctype: 0, packed: false, jstype: 0, lazy: false, deprecated: false, weak: false, uninterpretedOption: [] }; +} + +export const FieldOptions = { + encode(message: FieldOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ctype !== 0) { + writer.uint32(8).int32(message.ctype); + } + if (message.packed === true) { + writer.uint32(16).bool(message.packed); + } + if (message.jstype !== 0) { + writer.uint32(48).int32(message.jstype); + } + if (message.lazy === true) { + writer.uint32(40).bool(message.lazy); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.weak === true) { + writer.uint32(80).bool(message.weak); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.ctype = reader.int32() as any; + break; + case 2: + message.packed = reader.bool(); + break; + case 6: + message.jstype = reader.int32() as any; + break; + case 5: + message.lazy = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 10: + message.weak = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldOptions { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? Boolean(object.lazy) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? Boolean(object.weak) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FieldOptions): unknown { + const obj: any = {}; + message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype)); + message.packed !== undefined && (obj.packed = message.packed); + message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype)); + message.lazy !== undefined && (obj.lazy = message.lazy); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.weak !== undefined && (obj.weak = message.weak); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FieldOptions { + const message = createBaseFieldOptions(); + message.ctype = object.ctype ?? 0; + message.packed = object.packed ?? false; + message.jstype = object.jstype ?? 0; + message.lazy = object.lazy ?? false; + message.deprecated = object.deprecated ?? false; + message.weak = object.weak ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseOneofOptions(): OneofOptions { + return { uninterpretedOption: [] }; +} + +export const OneofOptions = { + encode(message: OneofOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: OneofOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): OneofOptions { + const message = createBaseOneofOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumOptions(): EnumOptions { + return { allowAlias: false, deprecated: false, uninterpretedOption: [] }; +} + +export const EnumOptions = { + encode(message: EnumOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.allowAlias === true) { + writer.uint32(16).bool(message.allowAlias); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.allowAlias = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumOptions { + return { + allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumOptions): unknown { + const obj: any = {}; + message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumOptions { + const message = createBaseEnumOptions(); + message.allowAlias = object.allowAlias ?? false; + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumValueOptions(): EnumValueOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const EnumValueOptions = { + encode(message: EnumValueOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(8).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumValueOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumValueOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumValueOptions { + const message = createBaseEnumValueOptions(); + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseServiceOptions(): ServiceOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const ServiceOptions = { + encode(message: ServiceOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceOptions { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ServiceOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ServiceOptions { + const message = createBaseServiceOptions(); + message.deprecated = object.deprecated ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMethodOptions(): MethodOptions { + return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] }; +} + +export const MethodOptions = { + encode(message: MethodOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + if (message.idempotencyLevel !== 0) { + writer.uint32(272).int32(message.idempotencyLevel); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 34: + message.idempotencyLevel = reader.int32() as any; + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : 0, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MethodOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.idempotencyLevel !== undefined + && (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel)); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MethodOptions { + const message = createBaseMethodOptions(); + message.deprecated = object.deprecated ?? false; + message.idempotencyLevel = object.idempotencyLevel ?? 0; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseUninterpretedOption(): UninterpretedOption { + return { + name: [], + identifierValue: "", + positiveIntValue: 0, + negativeIntValue: 0, + doubleValue: 0, + stringValue: new Uint8Array(), + aggregateValue: "", + }; +} + +export const UninterpretedOption = { + encode(message: UninterpretedOption, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.name) { + UninterpretedOption_NamePart.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.identifierValue !== "") { + writer.uint32(26).string(message.identifierValue); + } + if (message.positiveIntValue !== 0) { + writer.uint32(32).uint64(message.positiveIntValue); + } + if (message.negativeIntValue !== 0) { + writer.uint32(40).int64(message.negativeIntValue); + } + if (message.doubleValue !== 0) { + writer.uint32(49).double(message.doubleValue); + } + if (message.stringValue.length !== 0) { + writer.uint32(58).bytes(message.stringValue); + } + if (message.aggregateValue !== "") { + writer.uint32(66).string(message.aggregateValue); + } + 
return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.name.push(UninterpretedOption_NamePart.decode(reader, reader.uint32())); + break; + case 3: + message.identifierValue = reader.string(); + break; + case 4: + message.positiveIntValue = longToNumber(reader.uint64() as Long); + break; + case 5: + message.negativeIntValue = longToNumber(reader.int64() as Long); + break; + case 6: + message.doubleValue = reader.double(); + break; + case 7: + message.stringValue = reader.bytes(); + break; + case 8: + message.aggregateValue = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption { + return { + name: Array.isArray(object?.name) ? object.name.map((e: any) => UninterpretedOption_NamePart.fromJSON(e)) : [], + identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "", + positiveIntValue: isSet(object.positiveIntValue) ? Number(object.positiveIntValue) : 0, + negativeIntValue: isSet(object.negativeIntValue) ? Number(object.negativeIntValue) : 0, + doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) ? bytesFromBase64(object.stringValue) : new Uint8Array(), + aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "", + }; + }, + + toJSON(message: UninterpretedOption): unknown { + const obj: any = {}; + if (message.name) { + obj.name = message.name.map((e) => e ? 
UninterpretedOption_NamePart.toJSON(e) : undefined); + } else { + obj.name = []; + } + message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue); + message.positiveIntValue !== undefined && (obj.positiveIntValue = Math.round(message.positiveIntValue)); + message.negativeIntValue !== undefined && (obj.negativeIntValue = Math.round(message.negativeIntValue)); + message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue); + message.stringValue !== undefined + && (obj.stringValue = base64FromBytes( + message.stringValue !== undefined ? message.stringValue : new Uint8Array(), + )); + message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption { + const message = createBaseUninterpretedOption(); + message.name = object.name?.map((e) => UninterpretedOption_NamePart.fromPartial(e)) || []; + message.identifierValue = object.identifierValue ?? ""; + message.positiveIntValue = object.positiveIntValue ?? 0; + message.negativeIntValue = object.negativeIntValue ?? 0; + message.doubleValue = object.doubleValue ?? 0; + message.stringValue = object.stringValue ?? new Uint8Array(); + message.aggregateValue = object.aggregateValue ?? ""; + return message; + }, +}; + +function createBaseUninterpretedOption_NamePart(): UninterpretedOption_NamePart { + return { namePart: "", isExtension: false }; +} + +export const UninterpretedOption_NamePart = { + encode(message: UninterpretedOption_NamePart, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.namePart !== "") { + writer.uint32(10).string(message.namePart); + } + if (message.isExtension === true) { + writer.uint32(16).bool(message.isExtension); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption_NamePart { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseUninterpretedOption_NamePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.namePart = reader.string(); + break; + case 2: + message.isExtension = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption_NamePart { + return { + namePart: isSet(object.namePart) ? String(object.namePart) : "", + isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false, + }; + }, + + toJSON(message: UninterpretedOption_NamePart): unknown { + const obj: any = {}; + message.namePart !== undefined && (obj.namePart = message.namePart); + message.isExtension !== undefined && (obj.isExtension = message.isExtension); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart(); + message.namePart = object.namePart ?? ""; + message.isExtension = object.isExtension ?? false; + return message; + }, +}; + +function createBaseSourceCodeInfo(): SourceCodeInfo { + return { location: [] }; +} + +export const SourceCodeInfo = { + encode(message: SourceCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.location) { + SourceCodeInfo_Location.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.location.push(SourceCodeInfo_Location.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo { + return { + location: Array.isArray(object?.location) + ? object.location.map((e: any) => SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo): unknown { + const obj: any = {}; + if (message.location) { + obj.location = message.location.map((e) => e ? SourceCodeInfo_Location.toJSON(e) : undefined); + } else { + obj.location = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo { + const message = createBaseSourceCodeInfo(); + message.location = object.location?.map((e) => SourceCodeInfo_Location.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSourceCodeInfo_Location(): SourceCodeInfo_Location { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} + +export const SourceCodeInfo_Location = { + encode(message: SourceCodeInfo_Location, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(18).fork(); + for (const v of message.span) { + writer.int32(v); + } + writer.ldelim(); + if (message.leadingComments !== "") { + writer.uint32(26).string(message.leadingComments); + } + if (message.trailingComments !== "") { + writer.uint32(34).string(message.trailingComments); + } + for (const v of message.leadingDetachedComments) { + writer.uint32(50).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo_Location { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo_Location(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.span.push(reader.int32()); + } + } else { + message.span.push(reader.int32()); + } + break; + case 3: + message.leadingComments = reader.string(); + break; + case 4: + message.trailingComments = reader.string(); + break; + case 6: + message.leadingDetachedComments.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo_Location { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + span: Array.isArray(object?.span) ? object.span.map((e: any) => Number(e)) : [], + leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "", + trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "", + leadingDetachedComments: Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e: any) => String(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo_Location): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + if (message.span) { + obj.span = message.span.map((e) => Math.round(e)); + } else { + obj.span = []; + } + message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments); + message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments); + if (message.leadingDetachedComments) { + obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e); + } else { + obj.leadingDetachedComments = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location(); + message.path = object.path?.map((e) => e) || []; + message.span = object.span?.map((e) => e) || []; + message.leadingComments = object.leadingComments ?? ""; + message.trailingComments = object.trailingComments ?? ""; + message.leadingDetachedComments = object.leadingDetachedComments?.map((e) => e) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo(): GeneratedCodeInfo { + return { annotation: [] }; +} + +export const GeneratedCodeInfo = { + encode(message: GeneratedCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.annotation) { + GeneratedCodeInfo_Annotation.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.annotation.push(GeneratedCodeInfo_Annotation.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo { + return { + annotation: Array.isArray(object?.annotation) + ? object.annotation.map((e: any) => GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GeneratedCodeInfo): unknown { + const obj: any = {}; + if (message.annotation) { + obj.annotation = message.annotation.map((e) => e ? GeneratedCodeInfo_Annotation.toJSON(e) : undefined); + } else { + obj.annotation = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo(); + message.annotation = object.annotation?.map((e) => GeneratedCodeInfo_Annotation.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo_Annotation(): GeneratedCodeInfo_Annotation { + return { path: [], sourceFile: "", begin: 0, end: 0 }; +} + +export const GeneratedCodeInfo_Annotation = { + encode(message: GeneratedCodeInfo_Annotation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + if (message.sourceFile !== "") { + writer.uint32(18).string(message.sourceFile); + } + if (message.begin !== 0) { + writer.uint32(24).int32(message.begin); + } + if (message.end !== 0) { + writer.uint32(32).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo_Annotation { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo_Annotation(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + message.sourceFile = reader.string(); + break; + case 3: + message.begin = reader.int32(); + break; + case 4: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo_Annotation { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "", + begin: isSet(object.begin) ? Number(object.begin) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + }; + }, + + toJSON(message: GeneratedCodeInfo_Annotation): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile); + message.begin !== undefined && (obj.begin = Math.round(message.begin)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation(); + message.path = object.path?.map((e) => e) || []; + message.sourceFile = object.sourceFile ?? ""; + message.begin = object.begin ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/google/protobuf/timestamp.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/google/protobuf/timestamp.ts new file mode 100644 index 000000000..b00bb1b72 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/google/protobuf/timestamp.ts @@ -0,0 +1,216 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * A Timestamp represents a point in time independent of any time zone or local + * calendar, encoded as a count of seconds and fractions of seconds at + * nanosecond resolution. The count is relative to an epoch at UTC midnight on + * January 1, 1970, in the proleptic Gregorian calendar which extends the + * Gregorian calendar backwards to year one. + * + * All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap + * second table is needed for interpretation, using a [24-hour linear + * smear](https://developers.google.com/time/smear). + * + * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By + * restricting to that range, we ensure that we can convert to and from [RFC + * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + * + * # Examples + * + * Example 1: Compute Timestamp from POSIX `time()`. 
+ * + * Timestamp timestamp; + * timestamp.set_seconds(time(NULL)); + * timestamp.set_nanos(0); + * + * Example 2: Compute Timestamp from POSIX `gettimeofday()`. + * + * struct timeval tv; + * gettimeofday(&tv, NULL); + * + * Timestamp timestamp; + * timestamp.set_seconds(tv.tv_sec); + * timestamp.set_nanos(tv.tv_usec * 1000); + * + * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + * + * FILETIME ft; + * GetSystemTimeAsFileTime(&ft); + * UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + * + * // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + * // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. + * Timestamp timestamp; + * timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + * timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + * + * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + * + * long millis = System.currentTimeMillis(); + * + * Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + * .setNanos((int) ((millis % 1000) * 1000000)).build(); + * + * Example 5: Compute Timestamp from Java `Instant.now()`. + * + * Instant now = Instant.now(); + * + * Timestamp timestamp = + * Timestamp.newBuilder().setSeconds(now.getEpochSecond()) + * .setNanos(now.getNano()).build(); + * + * Example 6: Compute Timestamp from current time in Python. + * + * timestamp = Timestamp() + * timestamp.GetCurrentTime() + * + * # JSON Mapping + * + * In JSON format, the Timestamp type is encoded as a string in the + * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the + * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" + * where {year} is always expressed using four digits while {month}, {day}, + * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional + * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + * are optional. 
The "Z" suffix indicates the timezone ("UTC"); the timezone + * is required. A proto3 JSON serializer should always use UTC (as indicated by + * "Z") when printing the Timestamp type and a proto3 JSON parser should be + * able to accept both UTC and other timezones (as indicated by an offset). + * + * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + * 01:30 UTC on January 15, 2017. + * + * In JavaScript, one can convert a Date object to this format using the + * standard + * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) + * method. In Python, a standard `datetime.datetime` object can be converted + * to this format using + * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with + * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use + * the Joda Time's [`ISODateTimeFormat.dateTime()`]( + * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D + * ) to obtain a formatter capable of generating timestamps in this format. + */ +export interface Timestamp { + /** + * Represents seconds of UTC time since Unix epoch + * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + * 9999-12-31T23:59:59Z inclusive. + */ + seconds: number; + /** + * Non-negative fractions of a second at nanosecond resolution. Negative + * second values with fractions must still have non-negative nanos values + * that count forward in time. Must be from 0 to 999,999,999 + * inclusive. 
+ */ + nanos: number; +} + +function createBaseTimestamp(): Timestamp { + return { seconds: 0, nanos: 0 }; +} + +export const Timestamp = { + encode(message: Timestamp, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.seconds !== 0) { + writer.uint32(8).int64(message.seconds); + } + if (message.nanos !== 0) { + writer.uint32(16).int32(message.nanos); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Timestamp { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTimestamp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.seconds = longToNumber(reader.int64() as Long); + break; + case 2: + message.nanos = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Timestamp { + return { + seconds: isSet(object.seconds) ? Number(object.seconds) : 0, + nanos: isSet(object.nanos) ? Number(object.nanos) : 0, + }; + }, + + toJSON(message: Timestamp): unknown { + const obj: any = {}; + message.seconds !== undefined && (obj.seconds = Math.round(message.seconds)); + message.nanos !== undefined && (obj.nanos = Math.round(message.nanos)); + return obj; + }, + + fromPartial, I>>(object: I): Timestamp { + const message = createBaseTimestamp(); + message.seconds = object.seconds ?? 0; + message.nanos = object.nanos ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/zigbeealliance/distributedcomplianceledger/dclupgrade/approved_upgrade.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/zigbeealliance/distributedcomplianceledger/dclupgrade/approved_upgrade.ts new file mode 100644 index 000000000..c79a667b8 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/zigbeealliance/distributedcomplianceledger/dclupgrade/approved_upgrade.ts @@ -0,0 +1,112 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Plan } from "../../../cosmos/upgrade/v1beta1/upgrade"; +import { Grant } from "./grant"; + +export const 
protobufPackage = "zigbeealliance.distributedcomplianceledger.dclupgrade"; + +export interface ApprovedUpgrade { + plan: Plan | undefined; + creator: string; + approvals: Grant[]; + rejects: Grant[]; +} + +function createBaseApprovedUpgrade(): ApprovedUpgrade { + return { plan: undefined, creator: "", approvals: [], rejects: [] }; +} + +export const ApprovedUpgrade = { + encode(message: ApprovedUpgrade, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.plan !== undefined) { + Plan.encode(message.plan, writer.uint32(10).fork()).ldelim(); + } + if (message.creator !== "") { + writer.uint32(18).string(message.creator); + } + for (const v of message.approvals) { + Grant.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.rejects) { + Grant.encode(v!, writer.uint32(34).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ApprovedUpgrade { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseApprovedUpgrade(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.plan = Plan.decode(reader, reader.uint32()); + break; + case 2: + message.creator = reader.string(); + break; + case 3: + message.approvals.push(Grant.decode(reader, reader.uint32())); + break; + case 4: + message.rejects.push(Grant.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ApprovedUpgrade { + return { + plan: isSet(object.plan) ? Plan.fromJSON(object.plan) : undefined, + creator: isSet(object.creator) ? String(object.creator) : "", + approvals: Array.isArray(object?.approvals) ? object.approvals.map((e: any) => Grant.fromJSON(e)) : [], + rejects: Array.isArray(object?.rejects) ? 
object.rejects.map((e: any) => Grant.fromJSON(e)) : [], + }; + }, + + toJSON(message: ApprovedUpgrade): unknown { + const obj: any = {}; + message.plan !== undefined && (obj.plan = message.plan ? Plan.toJSON(message.plan) : undefined); + message.creator !== undefined && (obj.creator = message.creator); + if (message.approvals) { + obj.approvals = message.approvals.map((e) => e ? Grant.toJSON(e) : undefined); + } else { + obj.approvals = []; + } + if (message.rejects) { + obj.rejects = message.rejects.map((e) => e ? Grant.toJSON(e) : undefined); + } else { + obj.rejects = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ApprovedUpgrade { + const message = createBaseApprovedUpgrade(); + message.plan = (object.plan !== undefined && object.plan !== null) ? Plan.fromPartial(object.plan) : undefined; + message.creator = object.creator ?? ""; + message.approvals = object.approvals?.map((e) => Grant.fromPartial(e)) || []; + message.rejects = object.rejects?.map((e) => Grant.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/zigbeealliance/distributedcomplianceledger/dclupgrade/genesis.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/zigbeealliance/distributedcomplianceledger/dclupgrade/genesis.ts new file mode 100644 index 000000000..b9121d536 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/zigbeealliance/distributedcomplianceledger/dclupgrade/genesis.ts @@ -0,0 +1,111 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { ApprovedUpgrade } from "./approved_upgrade"; +import { ProposedUpgrade } from "./proposed_upgrade"; +import { RejectedUpgrade } from "./rejected_upgrade"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.dclupgrade"; + +/** GenesisState defines the dclupgrade module's genesis state. 
*/ +export interface GenesisState { + proposedUpgradeList: ProposedUpgrade[]; + approvedUpgradeList: ApprovedUpgrade[]; + /** this line is used by starport scaffolding # genesis/proto/state */ + rejectedUpgradeList: RejectedUpgrade[]; +} + +function createBaseGenesisState(): GenesisState { + return { proposedUpgradeList: [], approvedUpgradeList: [], rejectedUpgradeList: [] }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.proposedUpgradeList) { + ProposedUpgrade.encode(v!, writer.uint32(10).fork()).ldelim(); + } + for (const v of message.approvedUpgradeList) { + ApprovedUpgrade.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.rejectedUpgradeList) { + RejectedUpgrade.encode(v!, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenesisState(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.proposedUpgradeList.push(ProposedUpgrade.decode(reader, reader.uint32())); + break; + case 2: + message.approvedUpgradeList.push(ApprovedUpgrade.decode(reader, reader.uint32())); + break; + case 3: + message.rejectedUpgradeList.push(RejectedUpgrade.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GenesisState { + return { + proposedUpgradeList: Array.isArray(object?.proposedUpgradeList) + ? object.proposedUpgradeList.map((e: any) => ProposedUpgrade.fromJSON(e)) + : [], + approvedUpgradeList: Array.isArray(object?.approvedUpgradeList) + ? 
object.approvedUpgradeList.map((e: any) => ApprovedUpgrade.fromJSON(e)) + : [], + rejectedUpgradeList: Array.isArray(object?.rejectedUpgradeList) + ? object.rejectedUpgradeList.map((e: any) => RejectedUpgrade.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GenesisState): unknown { + const obj: any = {}; + if (message.proposedUpgradeList) { + obj.proposedUpgradeList = message.proposedUpgradeList.map((e) => e ? ProposedUpgrade.toJSON(e) : undefined); + } else { + obj.proposedUpgradeList = []; + } + if (message.approvedUpgradeList) { + obj.approvedUpgradeList = message.approvedUpgradeList.map((e) => e ? ApprovedUpgrade.toJSON(e) : undefined); + } else { + obj.approvedUpgradeList = []; + } + if (message.rejectedUpgradeList) { + obj.rejectedUpgradeList = message.rejectedUpgradeList.map((e) => e ? RejectedUpgrade.toJSON(e) : undefined); + } else { + obj.rejectedUpgradeList = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GenesisState { + const message = createBaseGenesisState(); + message.proposedUpgradeList = object.proposedUpgradeList?.map((e) => ProposedUpgrade.fromPartial(e)) || []; + message.approvedUpgradeList = object.approvedUpgradeList?.map((e) => ApprovedUpgrade.fromPartial(e)) || []; + message.rejectedUpgradeList = object.rejectedUpgradeList?.map((e) => RejectedUpgrade.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/zigbeealliance/distributedcomplianceledger/dclupgrade/grant.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/zigbeealliance/distributedcomplianceledger/dclupgrade/grant.ts new file mode 100644 index 000000000..f17b680a5 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/zigbeealliance/distributedcomplianceledger/dclupgrade/grant.ts @@ -0,0 +1,125 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.dclupgrade"; + +export interface Grant { + address: string; + /** number of nanoseconds elapsed since January 1, 1970 UTC */ + time: number; + info: string; +} + +function createBaseGrant(): Grant { + return { address: "", time: 0, info: "" }; +} + +export const Grant = { + encode(message: Grant, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + if (message.time !== 0) { + writer.uint32(16).int64(message.time); + } + if (message.info !== "") { + writer.uint32(26).string(message.info); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Grant { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGrant(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + case 2: + message.time = longToNumber(reader.int64() as Long); + break; + case 3: + message.info = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Grant { + return { + address: isSet(object.address) ? String(object.address) : "", + time: isSet(object.time) ? Number(object.time) : 0, + info: isSet(object.info) ? String(object.info) : "", + }; + }, + + toJSON(message: Grant): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + message.time !== undefined && (obj.time = Math.round(message.time)); + message.info !== undefined && (obj.info = message.info); + return obj; + }, + + fromPartial, I>>(object: I): Grant { + const message = createBaseGrant(); + message.address = object.address ?? ""; + message.time = object.time ?? 0; + message.info = object.info ?? ""; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/zigbeealliance/distributedcomplianceledger/dclupgrade/proposed_upgrade.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/zigbeealliance/distributedcomplianceledger/dclupgrade/proposed_upgrade.ts new file mode 100644 index 000000000..266202eb7 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/zigbeealliance/distributedcomplianceledger/dclupgrade/proposed_upgrade.ts @@ -0,0 +1,112 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Plan } from "../../../cosmos/upgrade/v1beta1/upgrade"; +import { Grant } from "./grant"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.dclupgrade"; + +export interface ProposedUpgrade { + plan: Plan | undefined; + creator: string; + approvals: Grant[]; + rejects: Grant[]; +} + +function createBaseProposedUpgrade(): ProposedUpgrade { + return { plan: undefined, creator: "", approvals: [], rejects: [] }; +} + +export const ProposedUpgrade = { + encode(message: ProposedUpgrade, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.plan !== undefined) { + Plan.encode(message.plan, writer.uint32(10).fork()).ldelim(); + } + if (message.creator !== "") { + writer.uint32(18).string(message.creator); + } + for (const v of message.approvals) { + Grant.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.rejects) { + Grant.encode(v!, writer.uint32(34).fork()).ldelim(); + } + 
return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ProposedUpgrade { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseProposedUpgrade(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.plan = Plan.decode(reader, reader.uint32()); + break; + case 2: + message.creator = reader.string(); + break; + case 3: + message.approvals.push(Grant.decode(reader, reader.uint32())); + break; + case 4: + message.rejects.push(Grant.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ProposedUpgrade { + return { + plan: isSet(object.plan) ? Plan.fromJSON(object.plan) : undefined, + creator: isSet(object.creator) ? String(object.creator) : "", + approvals: Array.isArray(object?.approvals) ? object.approvals.map((e: any) => Grant.fromJSON(e)) : [], + rejects: Array.isArray(object?.rejects) ? object.rejects.map((e: any) => Grant.fromJSON(e)) : [], + }; + }, + + toJSON(message: ProposedUpgrade): unknown { + const obj: any = {}; + message.plan !== undefined && (obj.plan = message.plan ? Plan.toJSON(message.plan) : undefined); + message.creator !== undefined && (obj.creator = message.creator); + if (message.approvals) { + obj.approvals = message.approvals.map((e) => e ? Grant.toJSON(e) : undefined); + } else { + obj.approvals = []; + } + if (message.rejects) { + obj.rejects = message.rejects.map((e) => e ? Grant.toJSON(e) : undefined); + } else { + obj.rejects = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ProposedUpgrade { + const message = createBaseProposedUpgrade(); + message.plan = (object.plan !== undefined && object.plan !== null) ? Plan.fromPartial(object.plan) : undefined; + message.creator = object.creator ?? 
""; + message.approvals = object.approvals?.map((e) => Grant.fromPartial(e)) || []; + message.rejects = object.rejects?.map((e) => Grant.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/zigbeealliance/distributedcomplianceledger/dclupgrade/query.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/zigbeealliance/distributedcomplianceledger/dclupgrade/query.ts new file mode 100644 index 000000000..7e9488e7d --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/zigbeealliance/distributedcomplianceledger/dclupgrade/query.ts @@ -0,0 +1,838 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { PageRequest, PageResponse } from "../../../cosmos/base/query/v1beta1/pagination"; +import { ApprovedUpgrade } from "./approved_upgrade"; +import { ProposedUpgrade } from "./proposed_upgrade"; +import { RejectedUpgrade } from "./rejected_upgrade"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.dclupgrade"; + +export interface QueryGetProposedUpgradeRequest { + name: string; +} + +export interface QueryGetProposedUpgradeResponse { + proposedUpgrade: ProposedUpgrade | undefined; +} + +export interface QueryAllProposedUpgradeRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllProposedUpgradeResponse { + proposedUpgrade: ProposedUpgrade[]; + 
pagination: PageResponse | undefined; +} + +export interface QueryGetApprovedUpgradeRequest { + name: string; +} + +export interface QueryGetApprovedUpgradeResponse { + approvedUpgrade: ApprovedUpgrade | undefined; +} + +export interface QueryAllApprovedUpgradeRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllApprovedUpgradeResponse { + approvedUpgrade: ApprovedUpgrade[]; + pagination: PageResponse | undefined; +} + +export interface QueryGetRejectedUpgradeRequest { + name: string; +} + +export interface QueryGetRejectedUpgradeResponse { + rejectedUpgrade: RejectedUpgrade | undefined; +} + +export interface QueryAllRejectedUpgradeRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllRejectedUpgradeResponse { + rejectedUpgrade: RejectedUpgrade[]; + pagination: PageResponse | undefined; +} + +function createBaseQueryGetProposedUpgradeRequest(): QueryGetProposedUpgradeRequest { + return { name: "" }; +} + +export const QueryGetProposedUpgradeRequest = { + encode(message: QueryGetProposedUpgradeRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetProposedUpgradeRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetProposedUpgradeRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetProposedUpgradeRequest { + return { name: isSet(object.name) ? 
String(object.name) : "" }; + }, + + toJSON(message: QueryGetProposedUpgradeRequest): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetProposedUpgradeRequest { + const message = createBaseQueryGetProposedUpgradeRequest(); + message.name = object.name ?? ""; + return message; + }, +}; + +function createBaseQueryGetProposedUpgradeResponse(): QueryGetProposedUpgradeResponse { + return { proposedUpgrade: undefined }; +} + +export const QueryGetProposedUpgradeResponse = { + encode(message: QueryGetProposedUpgradeResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.proposedUpgrade !== undefined) { + ProposedUpgrade.encode(message.proposedUpgrade, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetProposedUpgradeResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetProposedUpgradeResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.proposedUpgrade = ProposedUpgrade.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetProposedUpgradeResponse { + return { + proposedUpgrade: isSet(object.proposedUpgrade) ? ProposedUpgrade.fromJSON(object.proposedUpgrade) : undefined, + }; + }, + + toJSON(message: QueryGetProposedUpgradeResponse): unknown { + const obj: any = {}; + message.proposedUpgrade !== undefined + && (obj.proposedUpgrade = message.proposedUpgrade ? 
ProposedUpgrade.toJSON(message.proposedUpgrade) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetProposedUpgradeResponse { + const message = createBaseQueryGetProposedUpgradeResponse(); + message.proposedUpgrade = (object.proposedUpgrade !== undefined && object.proposedUpgrade !== null) + ? ProposedUpgrade.fromPartial(object.proposedUpgrade) + : undefined; + return message; + }, +}; + +function createBaseQueryAllProposedUpgradeRequest(): QueryAllProposedUpgradeRequest { + return { pagination: undefined }; +} + +export const QueryAllProposedUpgradeRequest = { + encode(message: QueryAllProposedUpgradeRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllProposedUpgradeRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllProposedUpgradeRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllProposedUpgradeRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllProposedUpgradeRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? 
PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllProposedUpgradeRequest { + const message = createBaseQueryAllProposedUpgradeRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllProposedUpgradeResponse(): QueryAllProposedUpgradeResponse { + return { proposedUpgrade: [], pagination: undefined }; +} + +export const QueryAllProposedUpgradeResponse = { + encode(message: QueryAllProposedUpgradeResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.proposedUpgrade) { + ProposedUpgrade.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllProposedUpgradeResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllProposedUpgradeResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.proposedUpgrade.push(ProposedUpgrade.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllProposedUpgradeResponse { + return { + proposedUpgrade: Array.isArray(object?.proposedUpgrade) + ? object.proposedUpgrade.map((e: any) => ProposedUpgrade.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? 
PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllProposedUpgradeResponse): unknown { + const obj: any = {}; + if (message.proposedUpgrade) { + obj.proposedUpgrade = message.proposedUpgrade.map((e) => e ? ProposedUpgrade.toJSON(e) : undefined); + } else { + obj.proposedUpgrade = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllProposedUpgradeResponse { + const message = createBaseQueryAllProposedUpgradeResponse(); + message.proposedUpgrade = object.proposedUpgrade?.map((e) => ProposedUpgrade.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetApprovedUpgradeRequest(): QueryGetApprovedUpgradeRequest { + return { name: "" }; +} + +export const QueryGetApprovedUpgradeRequest = { + encode(message: QueryGetApprovedUpgradeRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetApprovedUpgradeRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetApprovedUpgradeRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetApprovedUpgradeRequest { + return { name: isSet(object.name) ? 
String(object.name) : "" }; + }, + + toJSON(message: QueryGetApprovedUpgradeRequest): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetApprovedUpgradeRequest { + const message = createBaseQueryGetApprovedUpgradeRequest(); + message.name = object.name ?? ""; + return message; + }, +}; + +function createBaseQueryGetApprovedUpgradeResponse(): QueryGetApprovedUpgradeResponse { + return { approvedUpgrade: undefined }; +} + +export const QueryGetApprovedUpgradeResponse = { + encode(message: QueryGetApprovedUpgradeResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.approvedUpgrade !== undefined) { + ApprovedUpgrade.encode(message.approvedUpgrade, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetApprovedUpgradeResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetApprovedUpgradeResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.approvedUpgrade = ApprovedUpgrade.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetApprovedUpgradeResponse { + return { + approvedUpgrade: isSet(object.approvedUpgrade) ? ApprovedUpgrade.fromJSON(object.approvedUpgrade) : undefined, + }; + }, + + toJSON(message: QueryGetApprovedUpgradeResponse): unknown { + const obj: any = {}; + message.approvedUpgrade !== undefined + && (obj.approvedUpgrade = message.approvedUpgrade ? 
ApprovedUpgrade.toJSON(message.approvedUpgrade) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetApprovedUpgradeResponse { + const message = createBaseQueryGetApprovedUpgradeResponse(); + message.approvedUpgrade = (object.approvedUpgrade !== undefined && object.approvedUpgrade !== null) + ? ApprovedUpgrade.fromPartial(object.approvedUpgrade) + : undefined; + return message; + }, +}; + +function createBaseQueryAllApprovedUpgradeRequest(): QueryAllApprovedUpgradeRequest { + return { pagination: undefined }; +} + +export const QueryAllApprovedUpgradeRequest = { + encode(message: QueryAllApprovedUpgradeRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllApprovedUpgradeRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllApprovedUpgradeRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllApprovedUpgradeRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllApprovedUpgradeRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? 
PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllApprovedUpgradeRequest { + const message = createBaseQueryAllApprovedUpgradeRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllApprovedUpgradeResponse(): QueryAllApprovedUpgradeResponse { + return { approvedUpgrade: [], pagination: undefined }; +} + +export const QueryAllApprovedUpgradeResponse = { + encode(message: QueryAllApprovedUpgradeResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.approvedUpgrade) { + ApprovedUpgrade.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllApprovedUpgradeResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllApprovedUpgradeResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.approvedUpgrade.push(ApprovedUpgrade.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllApprovedUpgradeResponse { + return { + approvedUpgrade: Array.isArray(object?.approvedUpgrade) + ? object.approvedUpgrade.map((e: any) => ApprovedUpgrade.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? 
PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllApprovedUpgradeResponse): unknown { + const obj: any = {}; + if (message.approvedUpgrade) { + obj.approvedUpgrade = message.approvedUpgrade.map((e) => e ? ApprovedUpgrade.toJSON(e) : undefined); + } else { + obj.approvedUpgrade = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllApprovedUpgradeResponse { + const message = createBaseQueryAllApprovedUpgradeResponse(); + message.approvedUpgrade = object.approvedUpgrade?.map((e) => ApprovedUpgrade.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetRejectedUpgradeRequest(): QueryGetRejectedUpgradeRequest { + return { name: "" }; +} + +export const QueryGetRejectedUpgradeRequest = { + encode(message: QueryGetRejectedUpgradeRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetRejectedUpgradeRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetRejectedUpgradeRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetRejectedUpgradeRequest { + return { name: isSet(object.name) ? 
String(object.name) : "" }; + }, + + toJSON(message: QueryGetRejectedUpgradeRequest): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetRejectedUpgradeRequest { + const message = createBaseQueryGetRejectedUpgradeRequest(); + message.name = object.name ?? ""; + return message; + }, +}; + +function createBaseQueryGetRejectedUpgradeResponse(): QueryGetRejectedUpgradeResponse { + return { rejectedUpgrade: undefined }; +} + +export const QueryGetRejectedUpgradeResponse = { + encode(message: QueryGetRejectedUpgradeResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.rejectedUpgrade !== undefined) { + RejectedUpgrade.encode(message.rejectedUpgrade, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetRejectedUpgradeResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetRejectedUpgradeResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rejectedUpgrade = RejectedUpgrade.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetRejectedUpgradeResponse { + return { + rejectedUpgrade: isSet(object.rejectedUpgrade) ? RejectedUpgrade.fromJSON(object.rejectedUpgrade) : undefined, + }; + }, + + toJSON(message: QueryGetRejectedUpgradeResponse): unknown { + const obj: any = {}; + message.rejectedUpgrade !== undefined + && (obj.rejectedUpgrade = message.rejectedUpgrade ? 
RejectedUpgrade.toJSON(message.rejectedUpgrade) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetRejectedUpgradeResponse { + const message = createBaseQueryGetRejectedUpgradeResponse(); + message.rejectedUpgrade = (object.rejectedUpgrade !== undefined && object.rejectedUpgrade !== null) + ? RejectedUpgrade.fromPartial(object.rejectedUpgrade) + : undefined; + return message; + }, +}; + +function createBaseQueryAllRejectedUpgradeRequest(): QueryAllRejectedUpgradeRequest { + return { pagination: undefined }; +} + +export const QueryAllRejectedUpgradeRequest = { + encode(message: QueryAllRejectedUpgradeRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllRejectedUpgradeRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllRejectedUpgradeRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllRejectedUpgradeRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllRejectedUpgradeRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? 
PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllRejectedUpgradeRequest { + const message = createBaseQueryAllRejectedUpgradeRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllRejectedUpgradeResponse(): QueryAllRejectedUpgradeResponse { + return { rejectedUpgrade: [], pagination: undefined }; +} + +export const QueryAllRejectedUpgradeResponse = { + encode(message: QueryAllRejectedUpgradeResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rejectedUpgrade) { + RejectedUpgrade.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllRejectedUpgradeResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllRejectedUpgradeResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rejectedUpgrade.push(RejectedUpgrade.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllRejectedUpgradeResponse { + return { + rejectedUpgrade: Array.isArray(object?.rejectedUpgrade) + ? object.rejectedUpgrade.map((e: any) => RejectedUpgrade.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? 
PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllRejectedUpgradeResponse): unknown { + const obj: any = {}; + if (message.rejectedUpgrade) { + obj.rejectedUpgrade = message.rejectedUpgrade.map((e) => e ? RejectedUpgrade.toJSON(e) : undefined); + } else { + obj.rejectedUpgrade = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllRejectedUpgradeResponse { + const message = createBaseQueryAllRejectedUpgradeResponse(); + message.rejectedUpgrade = object.rejectedUpgrade?.map((e) => RejectedUpgrade.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +/** Query defines the gRPC querier service. */ +export interface Query { + /** Queries a ProposedUpgrade by index. */ + ProposedUpgrade(request: QueryGetProposedUpgradeRequest): Promise; + /** Queries a list of ProposedUpgrade items. */ + ProposedUpgradeAll(request: QueryAllProposedUpgradeRequest): Promise; + /** Queries a ApprovedUpgrade by index. */ + ApprovedUpgrade(request: QueryGetApprovedUpgradeRequest): Promise; + /** Queries a list of ApprovedUpgrade items. */ + ApprovedUpgradeAll(request: QueryAllApprovedUpgradeRequest): Promise; + /** Queries a RejectedUpgrade by index. */ + RejectedUpgrade(request: QueryGetRejectedUpgradeRequest): Promise; + /** Queries a list of RejectedUpgrade items. 
*/ + RejectedUpgradeAll(request: QueryAllRejectedUpgradeRequest): Promise; +} + +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.ProposedUpgrade = this.ProposedUpgrade.bind(this); + this.ProposedUpgradeAll = this.ProposedUpgradeAll.bind(this); + this.ApprovedUpgrade = this.ApprovedUpgrade.bind(this); + this.ApprovedUpgradeAll = this.ApprovedUpgradeAll.bind(this); + this.RejectedUpgrade = this.RejectedUpgrade.bind(this); + this.RejectedUpgradeAll = this.RejectedUpgradeAll.bind(this); + } + ProposedUpgrade(request: QueryGetProposedUpgradeRequest): Promise { + const data = QueryGetProposedUpgradeRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.dclupgrade.Query", + "ProposedUpgrade", + data, + ); + return promise.then((data) => QueryGetProposedUpgradeResponse.decode(new _m0.Reader(data))); + } + + ProposedUpgradeAll(request: QueryAllProposedUpgradeRequest): Promise { + const data = QueryAllProposedUpgradeRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.dclupgrade.Query", + "ProposedUpgradeAll", + data, + ); + return promise.then((data) => QueryAllProposedUpgradeResponse.decode(new _m0.Reader(data))); + } + + ApprovedUpgrade(request: QueryGetApprovedUpgradeRequest): Promise { + const data = QueryGetApprovedUpgradeRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.dclupgrade.Query", + "ApprovedUpgrade", + data, + ); + return promise.then((data) => QueryGetApprovedUpgradeResponse.decode(new _m0.Reader(data))); + } + + ApprovedUpgradeAll(request: QueryAllApprovedUpgradeRequest): Promise { + const data = QueryAllApprovedUpgradeRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.dclupgrade.Query", + "ApprovedUpgradeAll", + data, + ); + 
return promise.then((data) => QueryAllApprovedUpgradeResponse.decode(new _m0.Reader(data))); + } + + RejectedUpgrade(request: QueryGetRejectedUpgradeRequest): Promise { + const data = QueryGetRejectedUpgradeRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.dclupgrade.Query", + "RejectedUpgrade", + data, + ); + return promise.then((data) => QueryGetRejectedUpgradeResponse.decode(new _m0.Reader(data))); + } + + RejectedUpgradeAll(request: QueryAllRejectedUpgradeRequest): Promise { + const data = QueryAllRejectedUpgradeRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.dclupgrade.Query", + "RejectedUpgradeAll", + data, + ); + return promise.then((data) => QueryAllRejectedUpgradeResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/zigbeealliance/distributedcomplianceledger/dclupgrade/rejected_upgrade.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/zigbeealliance/distributedcomplianceledger/dclupgrade/rejected_upgrade.ts new file mode 100644 index 000000000..1de7a78f0 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/zigbeealliance/distributedcomplianceledger/dclupgrade/rejected_upgrade.ts @@ -0,0 +1,112 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Plan } from "../../../cosmos/upgrade/v1beta1/upgrade"; +import { Grant } from "./grant"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.dclupgrade"; + +export interface RejectedUpgrade { + plan: Plan | undefined; + creator: string; + approvals: Grant[]; + rejects: Grant[]; +} + +function createBaseRejectedUpgrade(): RejectedUpgrade { + return { plan: undefined, creator: "", approvals: [], rejects: [] }; +} + +export const RejectedUpgrade = { + encode(message: RejectedUpgrade, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.plan !== undefined) { + Plan.encode(message.plan, writer.uint32(10).fork()).ldelim(); + } + if (message.creator !== "") { + writer.uint32(18).string(message.creator); + } + for (const v of message.approvals) { + Grant.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.rejects) { + Grant.encode(v!, writer.uint32(34).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RejectedUpgrade { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseRejectedUpgrade(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.plan = Plan.decode(reader, reader.uint32()); + break; + case 2: + message.creator = reader.string(); + break; + case 3: + message.approvals.push(Grant.decode(reader, reader.uint32())); + break; + case 4: + message.rejects.push(Grant.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): RejectedUpgrade { + return { + plan: isSet(object.plan) ? Plan.fromJSON(object.plan) : undefined, + creator: isSet(object.creator) ? String(object.creator) : "", + approvals: Array.isArray(object?.approvals) ? object.approvals.map((e: any) => Grant.fromJSON(e)) : [], + rejects: Array.isArray(object?.rejects) ? object.rejects.map((e: any) => Grant.fromJSON(e)) : [], + }; + }, + + toJSON(message: RejectedUpgrade): unknown { + const obj: any = {}; + message.plan !== undefined && (obj.plan = message.plan ? Plan.toJSON(message.plan) : undefined); + message.creator !== undefined && (obj.creator = message.creator); + if (message.approvals) { + obj.approvals = message.approvals.map((e) => e ? Grant.toJSON(e) : undefined); + } else { + obj.approvals = []; + } + if (message.rejects) { + obj.rejects = message.rejects.map((e) => e ? Grant.toJSON(e) : undefined); + } else { + obj.rejects = []; + } + return obj; + }, + + fromPartial, I>>(object: I): RejectedUpgrade { + const message = createBaseRejectedUpgrade(); + message.plan = (object.plan !== undefined && object.plan !== null) ? Plan.fromPartial(object.plan) : undefined; + message.creator = object.creator ?? 
""; + message.approvals = object.approvals?.map((e) => Grant.fromPartial(e)) || []; + message.rejects = object.rejects?.map((e) => Grant.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/zigbeealliance/distributedcomplianceledger/dclupgrade/tx.ts b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/zigbeealliance/distributedcomplianceledger/dclupgrade/tx.ts new file mode 100644 index 000000000..27b6bfc8f --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.dclupgrade/types/zigbeealliance/distributedcomplianceledger/dclupgrade/tx.ts @@ -0,0 +1,478 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { Plan } from "../../../cosmos/upgrade/v1beta1/upgrade"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.dclupgrade"; + +export interface MsgProposeUpgrade { + creator: string; + plan: Plan | undefined; + info: string; + time: number; +} + +export interface MsgProposeUpgradeResponse { +} + +export interface MsgApproveUpgrade { + creator: string; + name: string; + info: string; + time: number; +} + +export interface MsgApproveUpgradeResponse { +} + +export interface MsgRejectUpgrade { + creator: string; + name: string; + info: string; + time: number; +} + +export interface MsgRejectUpgradeResponse { +} + +function createBaseMsgProposeUpgrade(): 
MsgProposeUpgrade { + return { creator: "", plan: undefined, info: "", time: 0 }; +} + +export const MsgProposeUpgrade = { + encode(message: MsgProposeUpgrade, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.creator !== "") { + writer.uint32(10).string(message.creator); + } + if (message.plan !== undefined) { + Plan.encode(message.plan, writer.uint32(18).fork()).ldelim(); + } + if (message.info !== "") { + writer.uint32(26).string(message.info); + } + if (message.time !== 0) { + writer.uint32(32).int64(message.time); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgProposeUpgrade { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgProposeUpgrade(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.creator = reader.string(); + break; + case 2: + message.plan = Plan.decode(reader, reader.uint32()); + break; + case 3: + message.info = reader.string(); + break; + case 4: + message.time = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgProposeUpgrade { + return { + creator: isSet(object.creator) ? String(object.creator) : "", + plan: isSet(object.plan) ? Plan.fromJSON(object.plan) : undefined, + info: isSet(object.info) ? String(object.info) : "", + time: isSet(object.time) ? Number(object.time) : 0, + }; + }, + + toJSON(message: MsgProposeUpgrade): unknown { + const obj: any = {}; + message.creator !== undefined && (obj.creator = message.creator); + message.plan !== undefined && (obj.plan = message.plan ? 
Plan.toJSON(message.plan) : undefined); + message.info !== undefined && (obj.info = message.info); + message.time !== undefined && (obj.time = Math.round(message.time)); + return obj; + }, + + fromPartial, I>>(object: I): MsgProposeUpgrade { + const message = createBaseMsgProposeUpgrade(); + message.creator = object.creator ?? ""; + message.plan = (object.plan !== undefined && object.plan !== null) ? Plan.fromPartial(object.plan) : undefined; + message.info = object.info ?? ""; + message.time = object.time ?? 0; + return message; + }, +}; + +function createBaseMsgProposeUpgradeResponse(): MsgProposeUpgradeResponse { + return {}; +} + +export const MsgProposeUpgradeResponse = { + encode(_: MsgProposeUpgradeResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgProposeUpgradeResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgProposeUpgradeResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgProposeUpgradeResponse { + return {}; + }, + + toJSON(_: MsgProposeUpgradeResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgProposeUpgradeResponse { + const message = createBaseMsgProposeUpgradeResponse(); + return message; + }, +}; + +function createBaseMsgApproveUpgrade(): MsgApproveUpgrade { + return { creator: "", name: "", info: "", time: 0 }; +} + +export const MsgApproveUpgrade = { + encode(message: MsgApproveUpgrade, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.creator !== "") { + writer.uint32(10).string(message.creator); + } + if (message.name !== "") { + writer.uint32(18).string(message.name); + } + if (message.info !== "") { + writer.uint32(26).string(message.info); + } + if (message.time !== 0) { + writer.uint32(32).int64(message.time); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgApproveUpgrade { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgApproveUpgrade(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.creator = reader.string(); + break; + case 2: + message.name = reader.string(); + break; + case 3: + message.info = reader.string(); + break; + case 4: + message.time = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgApproveUpgrade { + return { + creator: isSet(object.creator) ? String(object.creator) : "", + name: isSet(object.name) ? 
String(object.name) : "", + info: isSet(object.info) ? String(object.info) : "", + time: isSet(object.time) ? Number(object.time) : 0, + }; + }, + + toJSON(message: MsgApproveUpgrade): unknown { + const obj: any = {}; + message.creator !== undefined && (obj.creator = message.creator); + message.name !== undefined && (obj.name = message.name); + message.info !== undefined && (obj.info = message.info); + message.time !== undefined && (obj.time = Math.round(message.time)); + return obj; + }, + + fromPartial, I>>(object: I): MsgApproveUpgrade { + const message = createBaseMsgApproveUpgrade(); + message.creator = object.creator ?? ""; + message.name = object.name ?? ""; + message.info = object.info ?? ""; + message.time = object.time ?? 0; + return message; + }, +}; + +function createBaseMsgApproveUpgradeResponse(): MsgApproveUpgradeResponse { + return {}; +} + +export const MsgApproveUpgradeResponse = { + encode(_: MsgApproveUpgradeResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgApproveUpgradeResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgApproveUpgradeResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgApproveUpgradeResponse { + return {}; + }, + + toJSON(_: MsgApproveUpgradeResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgApproveUpgradeResponse { + const message = createBaseMsgApproveUpgradeResponse(); + return message; + }, +}; + +function createBaseMsgRejectUpgrade(): MsgRejectUpgrade { + return { creator: "", name: "", info: "", time: 0 }; +} + +export const MsgRejectUpgrade = { + encode(message: MsgRejectUpgrade, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.creator !== "") { + writer.uint32(10).string(message.creator); + } + if (message.name !== "") { + writer.uint32(18).string(message.name); + } + if (message.info !== "") { + writer.uint32(26).string(message.info); + } + if (message.time !== 0) { + writer.uint32(32).int64(message.time); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRejectUpgrade { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgRejectUpgrade(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.creator = reader.string(); + break; + case 2: + message.name = reader.string(); + break; + case 3: + message.info = reader.string(); + break; + case 4: + message.time = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgRejectUpgrade { + return { + creator: isSet(object.creator) ? String(object.creator) : "", + name: isSet(object.name) ? 
String(object.name) : "", + info: isSet(object.info) ? String(object.info) : "", + time: isSet(object.time) ? Number(object.time) : 0, + }; + }, + + toJSON(message: MsgRejectUpgrade): unknown { + const obj: any = {}; + message.creator !== undefined && (obj.creator = message.creator); + message.name !== undefined && (obj.name = message.name); + message.info !== undefined && (obj.info = message.info); + message.time !== undefined && (obj.time = Math.round(message.time)); + return obj; + }, + + fromPartial, I>>(object: I): MsgRejectUpgrade { + const message = createBaseMsgRejectUpgrade(); + message.creator = object.creator ?? ""; + message.name = object.name ?? ""; + message.info = object.info ?? ""; + message.time = object.time ?? 0; + return message; + }, +}; + +function createBaseMsgRejectUpgradeResponse(): MsgRejectUpgradeResponse { + return {}; +} + +export const MsgRejectUpgradeResponse = { + encode(_: MsgRejectUpgradeResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRejectUpgradeResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgRejectUpgradeResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgRejectUpgradeResponse { + return {}; + }, + + toJSON(_: MsgRejectUpgradeResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgRejectUpgradeResponse { + const message = createBaseMsgRejectUpgradeResponse(); + return message; + }, +}; + +/** Msg defines the Msg service. 
*/ +export interface Msg { + ProposeUpgrade(request: MsgProposeUpgrade): Promise; + ApproveUpgrade(request: MsgApproveUpgrade): Promise; + /** this line is used by starport scaffolding # proto/tx/rpc */ + RejectUpgrade(request: MsgRejectUpgrade): Promise; +} + +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.ProposeUpgrade = this.ProposeUpgrade.bind(this); + this.ApproveUpgrade = this.ApproveUpgrade.bind(this); + this.RejectUpgrade = this.RejectUpgrade.bind(this); + } + ProposeUpgrade(request: MsgProposeUpgrade): Promise { + const data = MsgProposeUpgrade.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.dclupgrade.Msg", + "ProposeUpgrade", + data, + ); + return promise.then((data) => MsgProposeUpgradeResponse.decode(new _m0.Reader(data))); + } + + ApproveUpgrade(request: MsgApproveUpgrade): Promise { + const data = MsgApproveUpgrade.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.dclupgrade.Msg", + "ApproveUpgrade", + data, + ); + return promise.then((data) => MsgApproveUpgradeResponse.decode(new _m0.Reader(data))); + } + + RejectUpgrade(request: MsgRejectUpgrade): Promise { + const data = MsgRejectUpgrade.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.dclupgrade.Msg", + "RejectUpgrade", + data, + ); + return promise.then((data) => MsgRejectUpgradeResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if 
(typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.model/api.swagger.yml b/ts-client/zigbeealliance.distributedcomplianceledger.model/api.swagger.yml new file mode 100644 index 000000000..12a85d70c --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.model/api.swagger.yml @@ -0,0 +1,809 @@ +swagger: '2.0' +info: + title: HTTP API Console zigbeealliance.distributedcomplianceledger.model + name: '' + description: '' +paths: + /dcl/model/models: + get: + operationId: ModelAll + responses: + '200': + description: A successful response. 
+ schema: + type: object + properties: + model: + type: array + items: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + deviceTypeId: + type: integer + format: int32 + productName: + type: string + productLabel: + type: string + partNumber: + type: string + commissioningCustomFlow: + type: integer + format: int32 + commissioningCustomFlowUrl: + type: string + commissioningModeInitialStepsHint: + type: integer + format: int64 + commissioningModeInitialStepsInstruction: + type: string + commissioningModeSecondaryStepsHint: + type: integer + format: int64 + commissioningModeSecondaryStepsInstruction: + type: string + userManualUrl: + type: string + supportUrl: + type: string + productUrl: + type: string + lsfUrl: + type: string + lsfRevision: + type: integer + format: int32 + creator: + type: string + schemaVersion: + type: integer + format: int64 + commissionerRemoteUiFlowUrl: + type: string + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/model/models/{vid}: + get: + operationId: VendorProducts + responses: + '200': + description: A successful response. 
+ schema: + type: object + properties: + vendorProducts: + type: object + properties: + vid: + type: integer + format: int32 + products: + type: array + items: + type: object + properties: + pid: + type: integer + format: int32 + name: + type: string + partNumber: + type: string + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: vid + in: path + required: true + type: integer + format: int32 + tags: + - Query + /dcl/model/models/{vid}/{pid}: + get: + operationId: Model + responses: + '200': + description: A successful response. + schema: + type: object + properties: + model: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + deviceTypeId: + type: integer + format: int32 + productName: + type: string + productLabel: + type: string + partNumber: + type: string + commissioningCustomFlow: + type: integer + format: int32 + commissioningCustomFlowUrl: + type: string + commissioningModeInitialStepsHint: + type: integer + format: int64 + commissioningModeInitialStepsInstruction: + type: string + commissioningModeSecondaryStepsHint: + type: integer + format: int64 + commissioningModeSecondaryStepsInstruction: + type: string + userManualUrl: + type: string + supportUrl: + type: string + productUrl: + type: string + lsfUrl: + type: string + lsfRevision: + type: integer + format: int32 + creator: + type: string + schemaVersion: + type: integer + format: int64 + commissionerRemoteUiFlowUrl: + type: string + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: vid + in: path + required: true + type: integer + format: int32 + - name: pid + in: path + required: true + type: integer + format: int32 + tags: + - Query + /dcl/model/versions/{vid}/{pid}: + get: + operationId: ModelVersions + responses: + '200': + description: A successful response. + schema: + type: object + properties: + modelVersions: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersions: + type: array + items: + type: integer + format: int64 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: vid + in: path + required: true + type: integer + format: int32 + - name: pid + in: path + required: true + type: integer + format: int32 + tags: + - Query + /dcl/model/versions/{vid}/{pid}/{softwareVersion}: + get: + operationId: ModelVersion + responses: + '200': + description: A successful response. 
+ schema: + type: object + properties: + modelVersion: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + softwareVersionString: + type: string + cdVersionNumber: + type: integer + format: int32 + firmwareInformation: + type: string + softwareVersionValid: + type: boolean + otaUrl: + type: string + otaFileSize: + type: string + format: uint64 + otaChecksum: + type: string + otaChecksumType: + type: integer + format: int32 + minApplicableSoftwareVersion: + type: integer + format: int64 + maxApplicableSoftwareVersion: + type: integer + format: int64 + releaseNotesUrl: + type: string + creator: + type: string + schemaVersion: + type: integer + format: int64 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: vid + in: path + required: true + type: integer + format: int32 + - name: pid + in: path + required: true + type: integer + format: int32 + - name: softwareVersion + in: path + required: true + type: integer + format: int64 + tags: + - Query +definitions: + Any: + type: object + properties: + '@type': + type: string + additionalProperties: {} + Status: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + PageRequest: + type: object + properties: + key: + type: string + format: byte + offset: + type: string + format: uint64 + limit: + type: string + format: uint64 + count_total: + type: boolean + reverse: + type: boolean + PageResponse: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + Product: 
+ type: object + properties: + pid: + type: integer + format: int32 + name: + type: string + partNumber: + type: string + QueryAllModelResponse: + type: object + properties: + model: + type: array + items: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + deviceTypeId: + type: integer + format: int32 + productName: + type: string + productLabel: + type: string + partNumber: + type: string + commissioningCustomFlow: + type: integer + format: int32 + commissioningCustomFlowUrl: + type: string + commissioningModeInitialStepsHint: + type: integer + format: int64 + commissioningModeInitialStepsInstruction: + type: string + commissioningModeSecondaryStepsHint: + type: integer + format: int64 + commissioningModeSecondaryStepsInstruction: + type: string + userManualUrl: + type: string + supportUrl: + type: string + productUrl: + type: string + lsfUrl: + type: string + lsfRevision: + type: integer + format: int32 + creator: + type: string + schemaVersion: + type: integer + format: int64 + commissionerRemoteUiFlowUrl: + type: string + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryGetModelResponse: + type: object + properties: + model: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + deviceTypeId: + type: integer + format: int32 + productName: + type: string + productLabel: + type: string + partNumber: + type: string + commissioningCustomFlow: + type: integer + format: int32 + commissioningCustomFlowUrl: + type: string + commissioningModeInitialStepsHint: + type: integer + format: int64 + commissioningModeInitialStepsInstruction: + type: string + commissioningModeSecondaryStepsHint: + type: integer + format: int64 + commissioningModeSecondaryStepsInstruction: + type: string + userManualUrl: + type: string + supportUrl: + type: string + productUrl: + type: string + lsfUrl: + 
type: string + lsfRevision: + type: integer + format: int32 + creator: + type: string + schemaVersion: + type: integer + format: int64 + commissionerRemoteUiFlowUrl: + type: string + QueryGetModelVersionResponse: + type: object + properties: + modelVersion: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + softwareVersionString: + type: string + cdVersionNumber: + type: integer + format: int32 + firmwareInformation: + type: string + softwareVersionValid: + type: boolean + otaUrl: + type: string + otaFileSize: + type: string + format: uint64 + otaChecksum: + type: string + otaChecksumType: + type: integer + format: int32 + minApplicableSoftwareVersion: + type: integer + format: int64 + maxApplicableSoftwareVersion: + type: integer + format: int64 + releaseNotesUrl: + type: string + creator: + type: string + schemaVersion: + type: integer + format: int64 + QueryGetModelVersionsResponse: + type: object + properties: + modelVersions: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersions: + type: array + items: + type: integer + format: int64 + QueryGetVendorProductsResponse: + type: object + properties: + vendorProducts: + type: object + properties: + vid: + type: integer + format: int32 + products: + type: array + items: + type: object + properties: + pid: + type: integer + format: int32 + name: + type: string + partNumber: + type: string + model.Model: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + deviceTypeId: + type: integer + format: int32 + productName: + type: string + productLabel: + type: string + partNumber: + type: string + commissioningCustomFlow: + type: integer + format: int32 + commissioningCustomFlowUrl: + type: string + commissioningModeInitialStepsHint: + type: integer + format: int64 + 
commissioningModeInitialStepsInstruction: + type: string + commissioningModeSecondaryStepsHint: + type: integer + format: int64 + commissioningModeSecondaryStepsInstruction: + type: string + userManualUrl: + type: string + supportUrl: + type: string + productUrl: + type: string + lsfUrl: + type: string + lsfRevision: + type: integer + format: int32 + creator: + type: string + schemaVersion: + type: integer + format: int64 + commissionerRemoteUiFlowUrl: + type: string + model.ModelVersion: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersion: + type: integer + format: int64 + softwareVersionString: + type: string + cdVersionNumber: + type: integer + format: int32 + firmwareInformation: + type: string + softwareVersionValid: + type: boolean + otaUrl: + type: string + otaFileSize: + type: string + format: uint64 + otaChecksum: + type: string + otaChecksumType: + type: integer + format: int32 + minApplicableSoftwareVersion: + type: integer + format: int64 + maxApplicableSoftwareVersion: + type: integer + format: int64 + releaseNotesUrl: + type: string + creator: + type: string + schemaVersion: + type: integer + format: int64 + model.ModelVersions: + type: object + properties: + vid: + type: integer + format: int32 + pid: + type: integer + format: int32 + softwareVersions: + type: array + items: + type: integer + format: int64 + model.VendorProducts: + type: object + properties: + vid: + type: integer + format: int32 + products: + type: array + items: + type: object + properties: + pid: + type: integer + format: int32 + name: + type: string + partNumber: + type: string + MsgCreateModelResponse: + type: object + MsgCreateModelVersionResponse: + type: object + MsgDeleteModelResponse: + type: object + MsgDeleteModelVersionResponse: + type: object + MsgUpdateModelResponse: + type: object + MsgUpdateModelVersionResponse: + type: object diff --git 
a/ts-client/zigbeealliance.distributedcomplianceledger.model/index.ts b/ts-client/zigbeealliance.distributedcomplianceledger.model/index.ts new file mode 100755 index 000000000..c9dfa159e --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.model/index.ts @@ -0,0 +1,6 @@ +import Module from './module'; +import { txClient, queryClient, registry } from './module'; +import { msgTypes } from './registry'; + +export * from "./types"; +export { Module, msgTypes, txClient, queryClient, registry }; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.model/module.ts b/ts-client/zigbeealliance.distributedcomplianceledger.model/module.ts new file mode 100755 index 000000000..385173dea --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.model/module.ts @@ -0,0 +1,304 @@ +// Generated by Ignite ignite.com/cli + +import { StdFee } from "@cosmjs/launchpad"; +import { SigningStargateClient, DeliverTxResponse } from "@cosmjs/stargate"; +import { EncodeObject, GeneratedType, OfflineSigner, Registry } from "@cosmjs/proto-signing"; +import { msgTypes } from './registry'; +import { IgniteClient } from "../client" +import { MissingWalletError } from "../helpers" +import { Api } from "./rest"; +import { MsgCreateModel } from "./types/zigbeealliance/distributedcomplianceledger/model/tx"; +import { MsgUpdateModel } from "./types/zigbeealliance/distributedcomplianceledger/model/tx"; +import { MsgDeleteModelVersion } from "./types/zigbeealliance/distributedcomplianceledger/model/tx"; +import { MsgCreateModelVersion } from "./types/zigbeealliance/distributedcomplianceledger/model/tx"; +import { MsgUpdateModelVersion } from "./types/zigbeealliance/distributedcomplianceledger/model/tx"; +import { MsgDeleteModel } from "./types/zigbeealliance/distributedcomplianceledger/model/tx"; + +import { Model as typeModel} from "./types" +import { ModelVersion as typeModelVersion} from "./types" +import { ModelVersions as typeModelVersions} from 
"./types" +import { Product as typeProduct} from "./types" +import { VendorProducts as typeVendorProducts} from "./types" + +export { MsgCreateModel, MsgUpdateModel, MsgDeleteModelVersion, MsgCreateModelVersion, MsgUpdateModelVersion, MsgDeleteModel }; + +type sendMsgCreateModelParams = { + value: MsgCreateModel, + fee?: StdFee, + memo?: string +}; + +type sendMsgUpdateModelParams = { + value: MsgUpdateModel, + fee?: StdFee, + memo?: string +}; + +type sendMsgDeleteModelVersionParams = { + value: MsgDeleteModelVersion, + fee?: StdFee, + memo?: string +}; + +type sendMsgCreateModelVersionParams = { + value: MsgCreateModelVersion, + fee?: StdFee, + memo?: string +}; + +type sendMsgUpdateModelVersionParams = { + value: MsgUpdateModelVersion, + fee?: StdFee, + memo?: string +}; + +type sendMsgDeleteModelParams = { + value: MsgDeleteModel, + fee?: StdFee, + memo?: string +}; + + +type msgCreateModelParams = { + value: MsgCreateModel, +}; + +type msgUpdateModelParams = { + value: MsgUpdateModel, +}; + +type msgDeleteModelVersionParams = { + value: MsgDeleteModelVersion, +}; + +type msgCreateModelVersionParams = { + value: MsgCreateModelVersion, +}; + +type msgUpdateModelVersionParams = { + value: MsgUpdateModelVersion, +}; + +type msgDeleteModelParams = { + value: MsgDeleteModel, +}; + + +export const registry = new Registry(msgTypes); + +type Field = { + name: string; + type: unknown; +} +function getStructure(template) { + const structure: {fields: Field[]} = { fields: [] } + for (let [key, value] of Object.entries(template)) { + let field = { name: key, type: typeof value } + structure.fields.push(field) + } + return structure +} +const defaultFee = { + amount: [], + gas: "200000", +}; + +interface TxClientOptions { + addr: string + prefix: string + signer?: OfflineSigner +} + +export const txClient = ({ signer, prefix, addr }: TxClientOptions = { addr: "http://localhost:26657", prefix: "cosmos" }) => { + + return { + + async sendMsgCreateModel({ value, fee, memo }: 
sendMsgCreateModelParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgCreateModel: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgCreateModel({ value: MsgCreateModel.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgCreateModel: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgUpdateModel({ value, fee, memo }: sendMsgUpdateModelParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgUpdateModel: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgUpdateModel({ value: MsgUpdateModel.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgUpdateModel: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgDeleteModelVersion({ value, fee, memo }: sendMsgDeleteModelVersionParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgDeleteModelVersion: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgDeleteModelVersion({ value: MsgDeleteModelVersion.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? 
fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgDeleteModelVersion: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgCreateModelVersion({ value, fee, memo }: sendMsgCreateModelVersionParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgCreateModelVersion: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgCreateModelVersion({ value: MsgCreateModelVersion.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgCreateModelVersion: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgUpdateModelVersion({ value, fee, memo }: sendMsgUpdateModelVersionParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgUpdateModelVersion: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgUpdateModelVersion({ value: MsgUpdateModelVersion.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgUpdateModelVersion: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgDeleteModel({ value, fee, memo }: sendMsgDeleteModelParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgDeleteModel: Unable to sign Tx. 
Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgDeleteModel({ value: MsgDeleteModel.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgDeleteModel: Could not broadcast Tx: '+ e.message) + } + }, + + + msgCreateModel({ value }: msgCreateModelParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.model.MsgCreateModel", value: MsgCreateModel.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgCreateModel: Could not create message: ' + e.message) + } + }, + + msgUpdateModel({ value }: msgUpdateModelParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.model.MsgUpdateModel", value: MsgUpdateModel.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgUpdateModel: Could not create message: ' + e.message) + } + }, + + msgDeleteModelVersion({ value }: msgDeleteModelVersionParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.model.MsgDeleteModelVersion", value: MsgDeleteModelVersion.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgDeleteModelVersion: Could not create message: ' + e.message) + } + }, + + msgCreateModelVersion({ value }: msgCreateModelVersionParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.model.MsgCreateModelVersion", value: MsgCreateModelVersion.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgCreateModelVersion: Could not create message: ' + e.message) + } + }, + + msgUpdateModelVersion({ value }: msgUpdateModelVersionParams): EncodeObject { + try { + return { typeUrl: 
"/zigbeealliance.distributedcomplianceledger.model.MsgUpdateModelVersion", value: MsgUpdateModelVersion.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgUpdateModelVersion: Could not create message: ' + e.message) + } + }, + + msgDeleteModel({ value }: msgDeleteModelParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.model.MsgDeleteModel", value: MsgDeleteModel.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgDeleteModel: Could not create message: ' + e.message) + } + }, + + } +}; + +interface QueryClientOptions { + addr: string +} + +export const queryClient = ({ addr: addr }: QueryClientOptions = { addr: "http://localhost:1317" }) => { + return new Api({ baseURL: addr }); +}; + +class SDKModule { + public query: ReturnType; + public tx: ReturnType; + public structure: Record; + public registry: Array<[string, GeneratedType]> = []; + + constructor(client: IgniteClient) { + + this.query = queryClient({ addr: client.env.apiURL }); + this.updateTX(client); + this.structure = { + Model: getStructure(typeModel.fromPartial({})), + ModelVersion: getStructure(typeModelVersion.fromPartial({})), + ModelVersions: getStructure(typeModelVersions.fromPartial({})), + Product: getStructure(typeProduct.fromPartial({})), + VendorProducts: getStructure(typeVendorProducts.fromPartial({})), + + }; + client.on('signer-changed',(signer) => { + this.updateTX(client); + }) + } + updateTX(client: IgniteClient) { + const methods = txClient({ + signer: client.signer, + addr: client.env.rpcURL, + prefix: client.env.prefix ?? 
"cosmos", + }) + + this.tx = methods; + for (let m in methods) { + this.tx[m] = methods[m].bind(this.tx); + } + } +}; + +const Module = (test: IgniteClient) => { + return { + module: { + ZigbeeallianceDistributedcomplianceledgerModel: new SDKModule(test) + }, + registry: msgTypes + } +} +export default Module; \ No newline at end of file diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.model/registry.ts b/ts-client/zigbeealliance.distributedcomplianceledger.model/registry.ts new file mode 100755 index 000000000..8537d5bfb --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.model/registry.ts @@ -0,0 +1,19 @@ +import { GeneratedType } from "@cosmjs/proto-signing"; +import { MsgCreateModel } from "./types/zigbeealliance/distributedcomplianceledger/model/tx"; +import { MsgUpdateModel } from "./types/zigbeealliance/distributedcomplianceledger/model/tx"; +import { MsgDeleteModelVersion } from "./types/zigbeealliance/distributedcomplianceledger/model/tx"; +import { MsgCreateModelVersion } from "./types/zigbeealliance/distributedcomplianceledger/model/tx"; +import { MsgUpdateModelVersion } from "./types/zigbeealliance/distributedcomplianceledger/model/tx"; +import { MsgDeleteModel } from "./types/zigbeealliance/distributedcomplianceledger/model/tx"; + +const msgTypes: Array<[string, GeneratedType]> = [ + ["/zigbeealliance.distributedcomplianceledger.model.MsgCreateModel", MsgCreateModel], + ["/zigbeealliance.distributedcomplianceledger.model.MsgUpdateModel", MsgUpdateModel], + ["/zigbeealliance.distributedcomplianceledger.model.MsgDeleteModelVersion", MsgDeleteModelVersion], + ["/zigbeealliance.distributedcomplianceledger.model.MsgCreateModelVersion", MsgCreateModelVersion], + ["/zigbeealliance.distributedcomplianceledger.model.MsgUpdateModelVersion", MsgUpdateModelVersion], + ["/zigbeealliance.distributedcomplianceledger.model.MsgDeleteModel", MsgDeleteModel], + +]; + +export { msgTypes } \ No newline at end of file diff --git 
a/ts-client/zigbeealliance.distributedcomplianceledger.model/rest.ts b/ts-client/zigbeealliance.distributedcomplianceledger.model/rest.ts new file mode 100644 index 000000000..c1b333ebf --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.model/rest.ts @@ -0,0 +1,448 @@ +/* eslint-disable */ +/* tslint:disable */ +/* + * --------------------------------------------------------------- + * ## THIS FILE WAS GENERATED VIA SWAGGER-TYPESCRIPT-API ## + * ## ## + * ## AUTHOR: acacode ## + * ## SOURCE: https://github.com/acacode/swagger-typescript-api ## + * --------------------------------------------------------------- + */ + +export interface DistributedcomplianceledgermodelModel { + /** @format int32 */ + vid?: number; + + /** @format int32 */ + pid?: number; + + /** @format int32 */ + deviceTypeId?: number; + productName?: string; + productLabel?: string; + partNumber?: string; + + /** @format int32 */ + commissioningCustomFlow?: number; + commissioningCustomFlowUrl?: string; + + /** @format int64 */ + commissioningModeInitialStepsHint?: number; + commissioningModeInitialStepsInstruction?: string; + + /** @format int64 */ + commissioningModeSecondaryStepsHint?: number; + commissioningModeSecondaryStepsInstruction?: string; + userManualUrl?: string; + supportUrl?: string; + productUrl?: string; + lsfUrl?: string; + + /** @format int32 */ + lsfRevision?: number; + creator?: string; + + /** @format int64 */ + schemaVersion?: number; + commissionerRemoteUiFlowUrl?: string; +} + +export interface DistributedcomplianceledgermodelModelVersion { + /** @format int32 */ + vid?: number; + + /** @format int32 */ + pid?: number; + + /** @format int64 */ + softwareVersion?: number; + softwareVersionString?: string; + + /** @format int32 */ + cdVersionNumber?: number; + firmwareInformation?: string; + softwareVersionValid?: boolean; + otaUrl?: string; + + /** @format uint64 */ + otaFileSize?: string; + otaChecksum?: string; + + /** @format int32 */ + 
otaChecksumType?: number; + + /** @format int64 */ + minApplicableSoftwareVersion?: number; + + /** @format int64 */ + maxApplicableSoftwareVersion?: number; + releaseNotesUrl?: string; + creator?: string; + + /** @format int64 */ + schemaVersion?: number; +} + +export interface DistributedcomplianceledgermodelModelVersions { + /** @format int32 */ + vid?: number; + + /** @format int32 */ + pid?: number; + softwareVersions?: number[]; +} + +export interface DistributedcomplianceledgermodelVendorProducts { + /** @format int32 */ + vid?: number; + products?: ModelProduct[]; +} + +export type ModelMsgCreateModelResponse = object; + +export type ModelMsgCreateModelVersionResponse = object; + +export type ModelMsgDeleteModelResponse = object; + +export type ModelMsgDeleteModelVersionResponse = object; + +export type ModelMsgUpdateModelResponse = object; + +export type ModelMsgUpdateModelVersionResponse = object; + +export interface ModelProduct { + /** @format int32 */ + pid?: number; + name?: string; + partNumber?: string; +} + +export interface ModelQueryAllModelResponse { + model?: DistributedcomplianceledgermodelModel[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. 
+ * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface ModelQueryGetModelResponse { + model?: DistributedcomplianceledgermodelModel; +} + +export interface ModelQueryGetModelVersionResponse { + modelVersion?: DistributedcomplianceledgermodelModelVersion; +} + +export interface ModelQueryGetModelVersionsResponse { + modelVersions?: DistributedcomplianceledgermodelModelVersions; +} + +export interface ModelQueryGetVendorProductsResponse { + vendorProducts?: DistributedcomplianceledgermodelVendorProducts; +} + +export interface ProtobufAny { + "@type"?: string; +} + +export interface RpcStatus { + /** @format int32 */ + code?: number; + message?: string; + details?: ProtobufAny[]; +} + +/** +* message SomeRequest { + Foo some_parameter = 1; + PageRequest pagination = 2; + } +*/ +export interface V1Beta1PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. + * @format byte + */ + key?: string; + + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. + * @format uint64 + */ + offset?: string; + + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. + * @format uint64 + */ + limit?: string; + + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. + */ + count_total?: boolean; + + /** + * reverse is set to true if results are to be returned in the descending order. 
+ * + * Since: cosmos-sdk 0.43 + */ + reverse?: boolean; +} + +/** +* PageResponse is to be embedded in gRPC response messages where the +corresponding request message has used PageRequest. + + message SomeResponse { + repeated Bar results = 1; + PageResponse page = 2; + } +*/ +export interface V1Beta1PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. + * @format byte + */ + next_key?: string; + + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + * @format uint64 + */ + total?: string; +} + +import axios, { AxiosInstance, AxiosRequestConfig, AxiosResponse, ResponseType } from "axios"; + +export type QueryParamsType = Record; + +export interface FullRequestParams extends Omit { + /** set parameter to `true` for call `securityWorker` for this request */ + secure?: boolean; + /** request path */ + path: string; + /** content type of request body */ + type?: ContentType; + /** query params */ + query?: QueryParamsType; + /** format of response (i.e. 
response.json() -> format: "json") */ + format?: ResponseType; + /** request body */ + body?: unknown; +} + +export type RequestParams = Omit; + +export interface ApiConfig extends Omit { + securityWorker?: ( + securityData: SecurityDataType | null, + ) => Promise | AxiosRequestConfig | void; + secure?: boolean; + format?: ResponseType; +} + +export enum ContentType { + Json = "application/json", + FormData = "multipart/form-data", + UrlEncoded = "application/x-www-form-urlencoded", +} + +export class HttpClient { + public instance: AxiosInstance; + private securityData: SecurityDataType | null = null; + private securityWorker?: ApiConfig["securityWorker"]; + private secure?: boolean; + private format?: ResponseType; + + constructor({ securityWorker, secure, format, ...axiosConfig }: ApiConfig = {}) { + this.instance = axios.create({ ...axiosConfig, baseURL: axiosConfig.baseURL || "" }); + this.secure = secure; + this.format = format; + this.securityWorker = securityWorker; + } + + public setSecurityData = (data: SecurityDataType | null) => { + this.securityData = data; + }; + + private mergeRequestParams(params1: AxiosRequestConfig, params2?: AxiosRequestConfig): AxiosRequestConfig { + return { + ...this.instance.defaults, + ...params1, + ...(params2 || {}), + headers: { + ...(this.instance.defaults.headers || {}), + ...(params1.headers || {}), + ...((params2 && params2.headers) || {}), + }, + }; + } + + private createFormData(input: Record): FormData { + return Object.keys(input || {}).reduce((formData, key) => { + const property = input[key]; + formData.append( + key, + property instanceof Blob + ? property + : typeof property === "object" && property !== null + ? JSON.stringify(property) + : `${property}`, + ); + return formData; + }, new FormData()); + } + + public request = async ({ + secure, + path, + type, + query, + format, + body, + ...params + }: FullRequestParams): Promise> => { + const secureParams = + ((typeof secure === "boolean" ? 
secure : this.secure) && + this.securityWorker && + (await this.securityWorker(this.securityData))) || + {}; + const requestParams = this.mergeRequestParams(params, secureParams); + const responseFormat = (format && this.format) || void 0; + + if (type === ContentType.FormData && body && body !== null && typeof body === "object") { + requestParams.headers.common = { Accept: "*/*" }; + requestParams.headers.post = {}; + requestParams.headers.put = {}; + + body = this.createFormData(body as Record); + } + + return this.instance.request({ + ...requestParams, + headers: { + ...(type && type !== ContentType.FormData ? { "Content-Type": type } : {}), + ...(requestParams.headers || {}), + }, + params: query, + responseType: responseFormat, + data: body, + url: path, + }); + }; +} + +/** + * @title zigbeealliance/distributedcomplianceledger/model/genesis.proto + * @version version not set + */ +export class Api extends HttpClient { + /** + * No description + * + * @tags Query + * @name QueryModelAll + * @summary Queries a list of all Model items. + * @request GET:/dcl/model/models + */ + queryModelAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/model/models`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryVendorProducts + * @summary Queries VendorProducts by index. + * @request GET:/dcl/model/models/{vid} + */ + queryVendorProducts = (vid: number, params: RequestParams = {}) => + this.request({ + path: `/dcl/model/models/${vid}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryModel + * @summary Queries a Model by index. 
+ * @request GET:/dcl/model/models/{vid}/{pid} + */ + queryModel = (vid: number, pid: number, params: RequestParams = {}) => + this.request({ + path: `/dcl/model/models/${vid}/${pid}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryModelVersions + * @summary Queries ModelVersions by index. + * @request GET:/dcl/model/versions/{vid}/{pid} + */ + queryModelVersions = (vid: number, pid: number, params: RequestParams = {}) => + this.request({ + path: `/dcl/model/versions/${vid}/${pid}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryModelVersion + * @summary Queries a ModelVersion by index. + * @request GET:/dcl/model/versions/{vid}/{pid}/{softwareVersion} + */ + queryModelVersion = (vid: number, pid: number, softwareVersion: number, params: RequestParams = {}) => + this.request({ + path: `/dcl/model/versions/${vid}/${pid}/${softwareVersion}`, + method: "GET", + format: "json", + ...params, + }); +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.model/types.ts b/ts-client/zigbeealliance.distributedcomplianceledger.model/types.ts new file mode 100755 index 000000000..c20ed888f --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.model/types.ts @@ -0,0 +1,15 @@ +import { Model } from "./types/zigbeealliance/distributedcomplianceledger/model/model" +import { ModelVersion } from "./types/zigbeealliance/distributedcomplianceledger/model/model_version" +import { ModelVersions } from "./types/zigbeealliance/distributedcomplianceledger/model/model_versions" +import { Product } from "./types/zigbeealliance/distributedcomplianceledger/model/product" +import { VendorProducts } from "./types/zigbeealliance/distributedcomplianceledger/model/vendor_products" + + +export { + Model, + ModelVersion, + ModelVersions, + Product, + VendorProducts, + + } \ No newline at end of file diff --git 
a/ts-client/zigbeealliance.distributedcomplianceledger.model/types/cosmos/base/query/v1beta1/pagination.ts b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/cosmos/base/query/v1beta1/pagination.ts new file mode 100644 index 000000000..a31ca249d --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/cosmos/base/query/v1beta1/pagination.ts @@ -0,0 +1,286 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos.base.query.v1beta1"; + +/** + * PageRequest is to be embedded in gRPC request messages for efficient + * pagination. Ex: + * + * message SomeRequest { + * Foo some_parameter = 1; + * PageRequest pagination = 2; + * } + */ +export interface PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. + */ + key: Uint8Array; + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. + */ + offset: number; + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. + */ + limit: number; + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. + */ + countTotal: boolean; + /** + * reverse is set to true if results are to be returned in the descending order. + * + * Since: cosmos-sdk 0.43 + */ + reverse: boolean; +} + +/** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. 
+ * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ +export interface PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. + */ + nextKey: Uint8Array; + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + */ + total: number; +} + +function createBasePageRequest(): PageRequest { + return { key: new Uint8Array(), offset: 0, limit: 0, countTotal: false, reverse: false }; +} + +export const PageRequest = { + encode(message: PageRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + if (message.offset !== 0) { + writer.uint32(16).uint64(message.offset); + } + if (message.limit !== 0) { + writer.uint32(24).uint64(message.limit); + } + if (message.countTotal === true) { + writer.uint32(32).bool(message.countTotal); + } + if (message.reverse === true) { + writer.uint32(40).bool(message.reverse); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PageRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePageRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + case 2: + message.offset = longToNumber(reader.uint64() as Long); + break; + case 3: + message.limit = longToNumber(reader.uint64() as Long); + break; + case 4: + message.countTotal = reader.bool(); + break; + case 5: + message.reverse = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PageRequest { + return { + key: isSet(object.key) ? 
bytesFromBase64(object.key) : new Uint8Array(), + offset: isSet(object.offset) ? Number(object.offset) : 0, + limit: isSet(object.limit) ? Number(object.limit) : 0, + countTotal: isSet(object.countTotal) ? Boolean(object.countTotal) : false, + reverse: isSet(object.reverse) ? Boolean(object.reverse) : false, + }; + }, + + toJSON(message: PageRequest): unknown { + const obj: any = {}; + message.key !== undefined + && (obj.key = base64FromBytes(message.key !== undefined ? message.key : new Uint8Array())); + message.offset !== undefined && (obj.offset = Math.round(message.offset)); + message.limit !== undefined && (obj.limit = Math.round(message.limit)); + message.countTotal !== undefined && (obj.countTotal = message.countTotal); + message.reverse !== undefined && (obj.reverse = message.reverse); + return obj; + }, + + fromPartial, I>>(object: I): PageRequest { + const message = createBasePageRequest(); + message.key = object.key ?? new Uint8Array(); + message.offset = object.offset ?? 0; + message.limit = object.limit ?? 0; + message.countTotal = object.countTotal ?? false; + message.reverse = object.reverse ?? false; + return message; + }, +}; + +function createBasePageResponse(): PageResponse { + return { nextKey: new Uint8Array(), total: 0 }; +} + +export const PageResponse = { + encode(message: PageResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.nextKey.length !== 0) { + writer.uint32(10).bytes(message.nextKey); + } + if (message.total !== 0) { + writer.uint32(16).uint64(message.total); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PageResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePageResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.nextKey = reader.bytes(); + break; + case 2: + message.total = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PageResponse { + return { + nextKey: isSet(object.nextKey) ? bytesFromBase64(object.nextKey) : new Uint8Array(), + total: isSet(object.total) ? Number(object.total) : 0, + }; + }, + + toJSON(message: PageResponse): unknown { + const obj: any = {}; + message.nextKey !== undefined + && (obj.nextKey = base64FromBytes(message.nextKey !== undefined ? message.nextKey : new Uint8Array())); + message.total !== undefined && (obj.total = Math.round(message.total)); + return obj; + }, + + fromPartial, I>>(object: I): PageResponse { + const message = createBasePageResponse(); + message.nextKey = object.nextKey ?? new Uint8Array(); + message.total = object.total ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.model/types/cosmos_proto/cosmos.ts b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/cosmos_proto/cosmos.ts new file mode 100644 index 000000000..79c8d3486 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/cosmos_proto/cosmos.ts @@ -0,0 +1,247 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos_proto"; + +export enum ScalarType { + SCALAR_TYPE_UNSPECIFIED = 0, + SCALAR_TYPE_STRING = 1, + SCALAR_TYPE_BYTES = 2, + UNRECOGNIZED = -1, +} + +export function scalarTypeFromJSON(object: any): ScalarType { + switch (object) { + case 0: + case "SCALAR_TYPE_UNSPECIFIED": + return ScalarType.SCALAR_TYPE_UNSPECIFIED; + case 1: + case "SCALAR_TYPE_STRING": + return ScalarType.SCALAR_TYPE_STRING; + case 2: + case "SCALAR_TYPE_BYTES": + return ScalarType.SCALAR_TYPE_BYTES; + case -1: + case "UNRECOGNIZED": + default: + return ScalarType.UNRECOGNIZED; + } +} + +export function scalarTypeToJSON(object: ScalarType): string { + switch (object) { + case ScalarType.SCALAR_TYPE_UNSPECIFIED: + return "SCALAR_TYPE_UNSPECIFIED"; + case ScalarType.SCALAR_TYPE_STRING: + return "SCALAR_TYPE_STRING"; + case ScalarType.SCALAR_TYPE_BYTES: + return "SCALAR_TYPE_BYTES"; + case ScalarType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * InterfaceDescriptor describes an interface type to be used with + * accepts_interface and implements_interface and declared by 
declare_interface. + */ +export interface InterfaceDescriptor { + /** + * name is the name of the interface. It should be a short-name (without + * a period) such that the fully qualified name of the interface will be + * package.name, ex. for the package a.b and interface named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the interface and its + * purpose. + */ + description: string; +} + +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. + * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. + * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ +export interface ScalarDescriptor { + /** + * name is the name of the scalar. It should be a short-name (without + * a period) such that the fully qualified name of the scalar will be + * package.name, ex. for the package a.b and scalar named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the scalar and its + * encoding format. For instance a big integer or decimal scalar should + * specify precisely the expected encoding format. + */ + description: string; + /** + * field_type is the type of field with which this scalar can be used. + * Scalars can be used with one and only one type of field so that + * encoding standards and simple and clear. Currently only string and + * bytes fields are supported for scalars. 
+ */ + fieldType: ScalarType[]; +} + +function createBaseInterfaceDescriptor(): InterfaceDescriptor { + return { name: "", description: "" }; +} + +export const InterfaceDescriptor = { + encode(message: InterfaceDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseInterfaceDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): InterfaceDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + }; + }, + + toJSON(message: InterfaceDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + return obj; + }, + + fromPartial, I>>(object: I): InterfaceDescriptor { + const message = createBaseInterfaceDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? 
""; + return message; + }, +}; + +function createBaseScalarDescriptor(): ScalarDescriptor { + return { name: "", description: "", fieldType: [] }; +} + +export const ScalarDescriptor = { + encode(message: ScalarDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + writer.uint32(26).fork(); + for (const v of message.fieldType) { + writer.int32(v); + } + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ScalarDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseScalarDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.fieldType.push(reader.int32() as any); + } + } else { + message.fieldType.push(reader.int32() as any); + } + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ScalarDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + fieldType: Array.isArray(object?.fieldType) ? 
object.fieldType.map((e: any) => scalarTypeFromJSON(e)) : [], + }; + }, + + toJSON(message: ScalarDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + if (message.fieldType) { + obj.fieldType = message.fieldType.map((e) => scalarTypeToJSON(e)); + } else { + obj.fieldType = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ScalarDescriptor { + const message = createBaseScalarDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? ""; + message.fieldType = object.fieldType?.map((e) => e) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.model/types/gogoproto/gogo.ts b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/gogoproto/gogo.ts new file mode 100644 index 000000000..3f41a047a --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/gogoproto/gogo.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "gogoproto"; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.model/types/google/api/annotations.ts b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/google/api/annotations.ts new file mode 100644 index 000000000..aace4787f --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/google/api/annotations.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "google.api"; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.model/types/google/api/http.ts b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/google/api/http.ts new file mode 100644 index 000000000..17e770d15 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/google/api/http.ts @@ -0,0 +1,589 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.api"; + +/** + * Defines the HTTP configuration for an API service. It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface Http { + /** + * A list of HTTP configuration rules that apply to individual API methods. + * + * **NOTE:** All service configuration rules follow "last one wins" order. 
+ */ + rules: HttpRule[]; + /** + * When set to true, URL path parmeters will be fully URI-decoded except in + * cases of single segment matches in reserved expansion, where "%2F" will be + * left encoded. + * + * The default behavior is to not decode RFC 6570 reserved characters in multi + * segment matches. + */ + fullyDecodeReservedExpansion: boolean; +} + +/** + * `HttpRule` defines the mapping of an RPC method to one or more HTTP + * REST API methods. The mapping specifies how different portions of the RPC + * request message are mapped to URL path, URL query parameters, and + * HTTP request body. The mapping is typically specified as an + * `google.api.http` annotation on the RPC method, + * see "google/api/annotations.proto" for details. + * + * The mapping consists of a field specifying the path template and + * method kind. The path template can refer to fields in the request + * message, as in the example below which describes a REST GET + * operation on a resource collection of messages: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}/{sub.subfield}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * SubMessage sub = 2; // `sub.subfield` is url-mapped + * } + * message Message { + * string text = 1; // content of the resource + * } + * + * The same http annotation can alternatively be expressed inside the + * `GRPC API Configuration` YAML file. + * + * http: + * rules: + * - selector: .Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * This definition enables an automatic, bidrectional mapping of HTTP + * JSON to RPC. 
Example: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456/foo` | `GetMessage(message_id: "123456" sub: SubMessage(subfield: "foo"))` + * + * In general, not only fields but also field paths can be referenced + * from a path pattern. Fields mapped to the path pattern cannot be + * repeated and must have a primitive (non-message) type. + * + * Any fields in the request message which are not bound by the path + * pattern automatically become (optional) HTTP query + * parameters. Assume the following definition of the request message: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * int64 revision = 2; // becomes a parameter + * SubMessage sub = 3; // `sub.subfield` becomes a parameter + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456?revision=2&sub.subfield=foo` | `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: "foo"))` + * + * Note that fields which are mapped to HTTP parameters must have a + * primitive type or a repeated primitive type. Message types are not + * allowed. In the case of a repeated type, the parameter can be + * repeated in the URL, as in `...?param=A¶m=B`. + * + * For HTTP method kinds which allow a request body, the `body` field + * specifies the mapping. 
Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * The following HTTP JSON to RPC mapping is enabled: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice of + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. 
Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC + * mappings: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` + * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: "123456")` + * + * # Rules for HTTP mapping + * + * The rules for mapping HTTP path, query parameters, and body fields + * to the request message are as follows: + * + * 1. The `body` field specifies either `*` or a field path, or is + * omitted. If omitted, it indicates there is no HTTP request body. + * 2. Leaf fields (recursive expansion of nested messages in the + * request) can be classified into three types: + * (a) Matched in the URL template. + * (b) Covered by body (if body is `*`, everything except (a) fields; + * else everything under the body field) + * (c) All other fields. + * 3. URL query parameters found in the HTTP request are mapped to (c) fields. + * 4. Any body sent with an HTTP request can contain only (b) fields. + * + * The syntax of the path template is as follows: + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single path segment. The syntax `**` matches zero + * or more path segments, which must be the last part of the path except the + * `Verb`. The syntax `LITERAL` matches literal text in the path. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. 
A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path, all characters + * except `[-_.~0-9a-zA-Z]` are percent-encoded. Such variables show up in the + * Discovery Document as `{var}`. + * + * If a variable contains one or more path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path, all + * characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. Such variables + * show up in the Discovery Document as `{+var}`. + * + * NOTE: While the single segment variable matches the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 + * Simple String Expansion, the multi segment variable **does not** match + * RFC 6570 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. + * + * NOTE: the field paths in variables and in the `body` must not refer to + * repeated fields or map fields. + */ +export interface HttpRule { + /** + * Selects methods to which this rule applies. + * + * Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + */ + selector: string; + /** Used for listing and getting information about resources. */ + get: + | string + | undefined; + /** Used for updating a resource. */ + put: + | string + | undefined; + /** Used for creating a resource. */ + post: + | string + | undefined; + /** Used for deleting a resource. */ + delete: + | string + | undefined; + /** Used for updating a resource. 
*/ + patch: + | string + | undefined; + /** + * The custom pattern is used for specifying an HTTP method that is not + * included in the `pattern` field, such as HEAD, or "*" to leave the + * HTTP method unspecified for this rule. The wild-card rule is useful + * for services that provide content to Web (HTML) clients. + */ + custom: + | CustomHttpPattern + | undefined; + /** + * The name of the request field whose value is mapped to the HTTP body, or + * `*` for mapping all fields not captured by the path pattern to the HTTP + * body. NOTE: the referred field must not be a repeated field and must be + * present at the top-level of request message type. + */ + body: string; + /** + * Optional. The name of the response field whose value is mapped to the HTTP + * body of response. Other response fields are ignored. When + * not set, the response message will be used as HTTP body of response. + */ + responseBody: string; + /** + * Additional HTTP bindings for the selector. Nested bindings must + * not contain an `additional_bindings` field themselves (that is, + * the nesting may only be one level deep). + */ + additionalBindings: HttpRule[]; +} + +/** A custom pattern is used for defining custom HTTP verb. */ +export interface CustomHttpPattern { + /** The name of this custom HTTP verb. */ + kind: string; + /** The path matched by this custom verb. */ + path: string; +} + +function createBaseHttp(): Http { + return { rules: [], fullyDecodeReservedExpansion: false }; +} + +export const Http = { + encode(message: Http, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rules) { + HttpRule.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.fullyDecodeReservedExpansion === true) { + writer.uint32(16).bool(message.fullyDecodeReservedExpansion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Http { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rules.push(HttpRule.decode(reader, reader.uint32())); + break; + case 2: + message.fullyDecodeReservedExpansion = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Http { + return { + rules: Array.isArray(object?.rules) ? object.rules.map((e: any) => HttpRule.fromJSON(e)) : [], + fullyDecodeReservedExpansion: isSet(object.fullyDecodeReservedExpansion) + ? Boolean(object.fullyDecodeReservedExpansion) + : false, + }; + }, + + toJSON(message: Http): unknown { + const obj: any = {}; + if (message.rules) { + obj.rules = message.rules.map((e) => e ? HttpRule.toJSON(e) : undefined); + } else { + obj.rules = []; + } + message.fullyDecodeReservedExpansion !== undefined + && (obj.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion); + return obj; + }, + + fromPartial, I>>(object: I): Http { + const message = createBaseHttp(); + message.rules = object.rules?.map((e) => HttpRule.fromPartial(e)) || []; + message.fullyDecodeReservedExpansion = object.fullyDecodeReservedExpansion ?? 
false; + return message; + }, +}; + +function createBaseHttpRule(): HttpRule { + return { + selector: "", + get: undefined, + put: undefined, + post: undefined, + delete: undefined, + patch: undefined, + custom: undefined, + body: "", + responseBody: "", + additionalBindings: [], + }; +} + +export const HttpRule = { + encode(message: HttpRule, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.selector !== "") { + writer.uint32(10).string(message.selector); + } + if (message.get !== undefined) { + writer.uint32(18).string(message.get); + } + if (message.put !== undefined) { + writer.uint32(26).string(message.put); + } + if (message.post !== undefined) { + writer.uint32(34).string(message.post); + } + if (message.delete !== undefined) { + writer.uint32(42).string(message.delete); + } + if (message.patch !== undefined) { + writer.uint32(50).string(message.patch); + } + if (message.custom !== undefined) { + CustomHttpPattern.encode(message.custom, writer.uint32(66).fork()).ldelim(); + } + if (message.body !== "") { + writer.uint32(58).string(message.body); + } + if (message.responseBody !== "") { + writer.uint32(98).string(message.responseBody); + } + for (const v of message.additionalBindings) { + HttpRule.encode(v!, writer.uint32(90).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): HttpRule { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseHttpRule(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.selector = reader.string(); + break; + case 2: + message.get = reader.string(); + break; + case 3: + message.put = reader.string(); + break; + case 4: + message.post = reader.string(); + break; + case 5: + message.delete = reader.string(); + break; + case 6: + message.patch = reader.string(); + break; + case 8: + message.custom = CustomHttpPattern.decode(reader, reader.uint32()); + break; + case 7: + message.body = reader.string(); + break; + case 12: + message.responseBody = reader.string(); + break; + case 11: + message.additionalBindings.push(HttpRule.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): HttpRule { + return { + selector: isSet(object.selector) ? String(object.selector) : "", + get: isSet(object.get) ? String(object.get) : undefined, + put: isSet(object.put) ? String(object.put) : undefined, + post: isSet(object.post) ? String(object.post) : undefined, + delete: isSet(object.delete) ? String(object.delete) : undefined, + patch: isSet(object.patch) ? String(object.patch) : undefined, + custom: isSet(object.custom) ? CustomHttpPattern.fromJSON(object.custom) : undefined, + body: isSet(object.body) ? String(object.body) : "", + responseBody: isSet(object.responseBody) ? String(object.responseBody) : "", + additionalBindings: Array.isArray(object?.additionalBindings) + ? 
object.additionalBindings.map((e: any) => HttpRule.fromJSON(e)) + : [], + }; + }, + + toJSON(message: HttpRule): unknown { + const obj: any = {}; + message.selector !== undefined && (obj.selector = message.selector); + message.get !== undefined && (obj.get = message.get); + message.put !== undefined && (obj.put = message.put); + message.post !== undefined && (obj.post = message.post); + message.delete !== undefined && (obj.delete = message.delete); + message.patch !== undefined && (obj.patch = message.patch); + message.custom !== undefined + && (obj.custom = message.custom ? CustomHttpPattern.toJSON(message.custom) : undefined); + message.body !== undefined && (obj.body = message.body); + message.responseBody !== undefined && (obj.responseBody = message.responseBody); + if (message.additionalBindings) { + obj.additionalBindings = message.additionalBindings.map((e) => e ? HttpRule.toJSON(e) : undefined); + } else { + obj.additionalBindings = []; + } + return obj; + }, + + fromPartial, I>>(object: I): HttpRule { + const message = createBaseHttpRule(); + message.selector = object.selector ?? ""; + message.get = object.get ?? undefined; + message.put = object.put ?? undefined; + message.post = object.post ?? undefined; + message.delete = object.delete ?? undefined; + message.patch = object.patch ?? undefined; + message.custom = (object.custom !== undefined && object.custom !== null) + ? CustomHttpPattern.fromPartial(object.custom) + : undefined; + message.body = object.body ?? ""; + message.responseBody = object.responseBody ?? 
""; + message.additionalBindings = object.additionalBindings?.map((e) => HttpRule.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseCustomHttpPattern(): CustomHttpPattern { + return { kind: "", path: "" }; +} + +export const CustomHttpPattern = { + encode(message: CustomHttpPattern, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.kind !== "") { + writer.uint32(10).string(message.kind); + } + if (message.path !== "") { + writer.uint32(18).string(message.path); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CustomHttpPattern { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCustomHttpPattern(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.kind = reader.string(); + break; + case 2: + message.path = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): CustomHttpPattern { + return { kind: isSet(object.kind) ? String(object.kind) : "", path: isSet(object.path) ? String(object.path) : "" }; + }, + + toJSON(message: CustomHttpPattern): unknown { + const obj: any = {}; + message.kind !== undefined && (obj.kind = message.kind); + message.path !== undefined && (obj.path = message.path); + return obj; + }, + + fromPartial, I>>(object: I): CustomHttpPattern { + const message = createBaseCustomHttpPattern(); + message.kind = object.kind ?? ""; + message.path = object.path ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? 
keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.model/types/google/protobuf/descriptor.ts b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/google/protobuf/descriptor.ts new file mode 100644 index 000000000..8fb7bb24c --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/google/protobuf/descriptor.ts @@ -0,0 +1,3753 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSet { + file: FileDescriptorProto[]; +} + +/** Describes a complete .proto file. */ +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name: string; + /** e.g. "foo", "foo.bar", etc. */ + package: string; + /** Names of files imported by this file. */ + dependency: string[]; + /** Indexes of the public imported files in the dependency list above. */ + publicDependency: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weakDependency: number[]; + /** All top-level definitions in this file. */ + messageType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + service: ServiceDescriptorProto[]; + extension: FieldDescriptorProto[]; + options: + | FileOptions + | undefined; + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. 
+ */ + sourceCodeInfo: + | SourceCodeInfo + | undefined; + /** + * The syntax of the proto file. + * The supported values are "proto2" and "proto3". + */ + syntax: string; +} + +/** Describes a message type. */ +export interface DescriptorProto { + name: string; + field: FieldDescriptorProto[]; + extension: FieldDescriptorProto[]; + nestedType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + extensionRange: DescriptorProto_ExtensionRange[]; + oneofDecl: OneofDescriptorProto[]; + options: MessageOptions | undefined; + reservedRange: DescriptorProto_ReservedRange[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reservedName: string[]; +} + +export interface DescriptorProto_ExtensionRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; + options: ExtensionRangeOptions | undefined; +} + +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; +} + +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Describes a field within a message. */ +export interface FieldDescriptorProto { + name: string; + number: number; + label: FieldDescriptorProto_Label; + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type: FieldDescriptorProto_Type; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. 
first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). + */ + typeName: string; + /** + * For extensions, this is the name of the type being extended. It is + * resolved in the same manner as type_name. + */ + extendee: string; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + * TODO(kenton): Base-64 encode? + */ + defaultValue: string; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneofIndex: number; + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + jsonName: string; + options: + | FieldOptions + | undefined; + /** + * If true, this is a proto3 "optional". When a proto3 field is optional, it + * tracks presence regardless of field type. + * + * When proto3_optional is true, this field must be belong to a oneof to + * signal to old proto3 clients that presence is tracked for this field. This + * oneof is known as a "synthetic" oneof, and this field must be its sole + * member (each proto3 optional field gets its own synthetic oneof). Synthetic + * oneofs exist in the descriptor only, and do not generate any API. Synthetic + * oneofs must be ordered after all "real" oneofs. + * + * For message fields, proto3_optional doesn't create any semantic change, + * since non-repeated message fields always track presence. However it still + * indicates the semantic detail of whether the user wrote "optional" or not. + * This can be useful for round-tripping the .proto file. 
For consistency we + * give message fields a synthetic oneof also, even though it is not required + * to track presence. This is especially important because the parser can't + * tell if a field is a message or an enum, so it must always create a + * synthetic oneof. + * + * Proto2 optional fields do not set this flag, because they already indicate + * optional with `LABEL_OPTIONAL`. + */ + proto3Optional: boolean; +} + +export enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + TYPE_GROUP = 10, + /** TYPE_MESSAGE - Length-delimited aggregate. */ + TYPE_MESSAGE = 11, + /** TYPE_BYTES - New in version 2. */ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + /** TYPE_SINT64 - Uses ZigZag encoding. 
*/ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + return FieldDescriptorProto_Type.TYPE_SINT64; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Type.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case 
FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + case FieldDescriptorProto_Type.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Label.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string { + switch (object) { + case 
FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + case FieldDescriptorProto_Label.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** Describes a oneof. */ +export interface OneofDescriptorProto { + name: string; + options: OneofOptions | undefined; +} + +/** Describes an enum type. */ +export interface EnumDescriptorProto { + name: string; + value: EnumValueDescriptorProto[]; + options: + | EnumOptions + | undefined; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reservedRange: EnumDescriptorProto_EnumReservedRange[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reservedName: string[]; +} + +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start: number; + /** Inclusive. */ + end: number; +} + +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProto { + name: string; + number: number; + options: EnumValueOptions | undefined; +} + +/** Describes a service. */ +export interface ServiceDescriptorProto { + name: string; + method: MethodDescriptorProto[]; + options: ServiceOptions | undefined; +} + +/** Describes a method of a service. */ +export interface MethodDescriptorProto { + name: string; + /** + * Input and output type names. 
These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + inputType: string; + outputType: string; + options: + | MethodOptions + | undefined; + /** Identifies if client streams multiple client messages */ + clientStreaming: boolean; + /** Identifies if server streams multiple server messages */ + serverStreaming: boolean; +} + +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage: string; + /** + * Controls the name of the wrapper Java class generated for the .proto file. + * That class will always contain the .proto file's getDescriptor() method as + * well as any top-level extensions defined in the .proto file. + * If java_multiple_files is disabled, then all the other classes from the + * .proto file will be nested inside the single wrapper outer class. + */ + javaOuterClassname: string; + /** + * If enabled, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the wrapper class + * named by java_outer_classname. However, the wrapper class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + javaMultipleFiles: boolean; + /** + * This option does nothing. + * + * @deprecated + */ + javaGenerateEqualsAndHash: boolean; + /** + * If set true, then the Java2 code generator will generate code that + * throws an exception whenever an attempt is made to assign a non-UTF-8 + * byte sequence to a string field. + * Message reflection will do the same. + * However, an extension field still accepts non-UTF-8 byte sequences. 
+ * This option has no effect on when used with the lite runtime. + */ + javaStringCheckUtf8: boolean; + optimizeFor: FileOptions_OptimizeMode; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + goPackage: string; + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + ccGenericServices: boolean; + javaGenericServices: boolean; + pyGenericServices: boolean; + phpGenericServices: boolean; + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated: boolean; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + ccEnableArenas: boolean; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. + */ + objcClassPrefix: string; + /** Namespace for generated classes; defaults to the package. 
*/ + csharpNamespace: string; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swiftPrefix: string; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + phpClassPrefix: string; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. When this option is empty, the package name will be used for + * determining the namespace. + */ + phpNamespace: string; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + phpMetadataNamespace: string; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + rubyPackage: string; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Generated classes can be optimized for speed or code size. */ +export enum FileOptions_OptimizeMode { + /** SPEED - Generate complete code for parsing, serialization, */ + SPEED = 1, + /** CODE_SIZE - etc. */ + CODE_SIZE = 2, + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. 
*/ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1, +} + +export function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + case -1: + case "UNRECOGNIZED": + default: + return FileOptions_OptimizeMode.UNRECOGNIZED; + } +} + +export function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + case FileOptions_OptimizeMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + messageSetWireFormat: boolean; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". 
+ */ + noStandardDescriptorAccessor: boolean; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated: boolean; + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + mapEntry: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface FieldOptions { + /** + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is not yet implemented in the open source + * release -- sorry, we'll try to include it in a future version! + */ + ctype: FieldOptions_CType; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. 
+ */ + packed: boolean; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. + * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype: FieldOptions_JSType; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * Note that implementations may choose not to check required fields within + * a lazy sub-message. That is, calling IsInitialized() on the outer message + * may return true even if the inner message has missing required fields. 
+ * This is necessary because otherwise the inner message would have to be + * parsed in order to perform the check, defeating the purpose of lazy + * parsing. An implementation which chooses not to check required fields + * must be consistent about it. That is, for any particular sub-message, the + * implementation must either *always* check its required fields, or *never* + * check its required fields, regardless of whether or not the message has + * been parsed. + */ + lazy: boolean; + /** + * Is this field deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated: boolean; + /** For Google-internal migration only. Do not use. */ + weak: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export enum FieldOptions_CType { + /** STRING - Default mode. */ + STRING = 0, + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_CType.UNRECOGNIZED; + } +} + +export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + case FieldOptions_CType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. */ + JS_NORMAL = 0, + /** JS_STRING - Use JavaScript strings. 
*/ + JS_STRING = 1, + /** JS_NUMBER - Use JavaScript numbers. */ + JS_NUMBER = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_JSType.UNRECOGNIZED; + } +} + +export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + case FieldOptions_JSType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface OneofOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias: boolean; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumValueOptions { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. 
*/ + uninterpretedOption: UninterpretedOption[]; +} + +export interface ServiceOptions { + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated: boolean; + idempotencyLevel: MethodOptions_IdempotencyLevel; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
+ */ +export enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1, +} + +export function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + case -1: + case "UNRECOGNIZED": + default: + return MethodOptions_IdempotencyLevel.UNRECOGNIZED; + } +} + +export function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + case MethodOptions_IdempotencyLevel.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. 
+ */ + identifierValue: string; + positiveIntValue: number; + negativeIntValue: number; + doubleValue: number; + stringValue: Uint8Array; + aggregateValue: string; +} + +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePart { + namePart: string; + isExtension: boolean; +} + +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. 
This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[]; +} + +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition. For + * example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). 
+ */ + path: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. + * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to qux. + * // + * // Another line attached to qux. + * optional double qux = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to qux or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. * / + * /* Block comment attached to + * * grault. 
* / + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leadingComments: string; + trailingComments: string; + leadingDetachedComments: string[]; +} + +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. + */ + annotation: GeneratedCodeInfo_Annotation[]; +} + +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[]; + /** Identifies the filesystem path to the original source .proto. */ + sourceFile: string; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin: number; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified offset. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end: number; +} + +function createBaseFileDescriptorSet(): FileDescriptorSet { + return { file: [] }; +} + +export const FileDescriptorSet = { + encode(message: FileDescriptorSet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.file) { + FileDescriptorProto.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorSet { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.file.push(FileDescriptorProto.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorSet { + return { file: Array.isArray(object?.file) ? object.file.map((e: any) => FileDescriptorProto.fromJSON(e)) : [] }; + }, + + toJSON(message: FileDescriptorSet): unknown { + const obj: any = {}; + if (message.file) { + obj.file = message.file.map((e) => e ? FileDescriptorProto.toJSON(e) : undefined); + } else { + obj.file = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorSet { + const message = createBaseFileDescriptorSet(); + message.file = object.file?.map((e) => FileDescriptorProto.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFileDescriptorProto(): FileDescriptorProto { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + }; +} + +export const FileDescriptorProto = { + encode(message: FileDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.package !== "") { + writer.uint32(18).string(message.package); + } + for (const v of message.dependency) { + writer.uint32(26).string(v!); + } + writer.uint32(82).fork(); + for (const v of message.publicDependency) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(90).fork(); + for (const v of message.weakDependency) { + writer.int32(v); + } + writer.ldelim(); + for (const v of message.messageType) { + DescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of 
message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const v of message.service) { + ServiceDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(58).fork()).ldelim(); + } + if (message.options !== undefined) { + FileOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.sourceCodeInfo !== undefined) { + SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(74).fork()).ldelim(); + } + if (message.syntax !== "") { + writer.uint32(98).string(message.syntax); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFileDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.package = reader.string(); + break; + case 3: + message.dependency.push(reader.string()); + break; + case 10: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.publicDependency.push(reader.int32()); + } + } else { + message.publicDependency.push(reader.int32()); + } + break; + case 11: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.weakDependency.push(reader.int32()); + } + } else { + message.weakDependency.push(reader.int32()); + } + break; + case 4: + message.messageType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.service.push(ServiceDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + 
message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 8: + message.options = FileOptions.decode(reader, reader.uint32()); + break; + case 9: + message.sourceCodeInfo = SourceCodeInfo.decode(reader, reader.uint32()); + break; + case 12: + message.syntax = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + package: isSet(object.package) ? String(object.package) : "", + dependency: Array.isArray(object?.dependency) ? object.dependency.map((e: any) => String(e)) : [], + publicDependency: Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e: any) => Number(e)) + : [], + weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e: any) => Number(e)) : [], + messageType: Array.isArray(object?.messageType) + ? object.messageType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + service: Array.isArray(object?.service) ? object.service.map((e: any) => ServiceDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) ? SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, + syntax: isSet(object.syntax) ? 
String(object.syntax) : "", + }; + }, + + toJSON(message: FileDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.package !== undefined && (obj.package = message.package); + if (message.dependency) { + obj.dependency = message.dependency.map((e) => e); + } else { + obj.dependency = []; + } + if (message.publicDependency) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } else { + obj.publicDependency = []; + } + if (message.weakDependency) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } else { + obj.weakDependency = []; + } + if (message.messageType) { + obj.messageType = message.messageType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.messageType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.service) { + obj.service = message.service.map((e) => e ? ServiceDescriptorProto.toJSON(e) : undefined); + } else { + obj.service = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + message.options !== undefined && (obj.options = message.options ? FileOptions.toJSON(message.options) : undefined); + message.sourceCodeInfo !== undefined + && (obj.sourceCodeInfo = message.sourceCodeInfo ? SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined); + message.syntax !== undefined && (obj.syntax = message.syntax); + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorProto { + const message = createBaseFileDescriptorProto(); + message.name = object.name ?? ""; + message.package = object.package ?? 
""; + message.dependency = object.dependency?.map((e) => e) || []; + message.publicDependency = object.publicDependency?.map((e) => e) || []; + message.weakDependency = object.weakDependency?.map((e) => e) || []; + message.messageType = object.messageType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.service = object.service?.map((e) => ServiceDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? FileOptions.fromPartial(object.options) + : undefined; + message.sourceCodeInfo = (object.sourceCodeInfo !== undefined && object.sourceCodeInfo !== null) + ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) + : undefined; + message.syntax = object.syntax ?? ""; + return message; + }, +}; + +function createBaseDescriptorProto(): DescriptorProto { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + }; +} + +export const DescriptorProto = { + encode(message: DescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.field) { + FieldDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.nestedType) { + DescriptorProto.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.extensionRange) { + DescriptorProto_ExtensionRange.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const 
v of message.oneofDecl) { + OneofDescriptorProto.encode(v!, writer.uint32(66).fork()).ldelim(); + } + if (message.options !== undefined) { + MessageOptions.encode(message.options, writer.uint32(58).fork()).ldelim(); + } + for (const v of message.reservedRange) { + DescriptorProto_ReservedRange.encode(v!, writer.uint32(74).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(82).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.field.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.nestedType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 4: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.extensionRange.push(DescriptorProto_ExtensionRange.decode(reader, reader.uint32())); + break; + case 8: + message.oneofDecl.push(OneofDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + message.options = MessageOptions.decode(reader, reader.uint32()); + break; + case 9: + message.reservedRange.push(DescriptorProto_ReservedRange.decode(reader, reader.uint32())); + break; + case 10: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + field: Array.isArray(object?.field) ? 
object.field.map((e: any) => FieldDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: Array.isArray(object?.nestedType) + ? object.nestedType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + extensionRange: Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e: any) => DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e: any) => OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? MessageOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: DescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.field) { + obj.field = message.field.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.field = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + if (message.nestedType) { + obj.nestedType = message.nestedType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.nestedType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.extensionRange) { + obj.extensionRange = message.extensionRange.map((e) => e ? 
DescriptorProto_ExtensionRange.toJSON(e) : undefined); + } else { + obj.extensionRange = []; + } + if (message.oneofDecl) { + obj.oneofDecl = message.oneofDecl.map((e) => e ? OneofDescriptorProto.toJSON(e) : undefined); + } else { + obj.oneofDecl = []; + } + message.options !== undefined + && (obj.options = message.options ? MessageOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => e ? DescriptorProto_ReservedRange.toJSON(e) : undefined); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): DescriptorProto { + const message = createBaseDescriptorProto(); + message.name = object.name ?? ""; + message.field = object.field?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.nestedType = object.nestedType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.extensionRange = object.extensionRange?.map((e) => DescriptorProto_ExtensionRange.fromPartial(e)) || []; + message.oneofDecl = object.oneofDecl?.map((e) => OneofDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
MessageOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => DescriptorProto_ReservedRange.fromPartial(e)) || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseDescriptorProto_ExtensionRange(): DescriptorProto_ExtensionRange { + return { start: 0, end: 0, options: undefined }; +} + +export const DescriptorProto_ExtensionRange = { + encode(message: DescriptorProto_ExtensionRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + if (message.options !== undefined) { + ExtensionRangeOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ExtensionRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ExtensionRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + case 3: + message.options = ExtensionRangeOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ExtensionRange { + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + options: isSet(object.options) ? 
ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: DescriptorProto_ExtensionRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + message.options !== undefined + && (obj.options = message.options ? ExtensionRangeOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ExtensionRange { + const message = createBaseDescriptorProto_ExtensionRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? ExtensionRangeOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseDescriptorProto_ReservedRange(): DescriptorProto_ReservedRange { + return { start: 0, end: 0 }; +} + +export const DescriptorProto_ReservedRange = { + encode(message: DescriptorProto_ReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? 
Number(object.end) : 0 }; + }, + + toJSON(message: DescriptorProto_ReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ReservedRange { + const message = createBaseDescriptorProto_ReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + return message; + }, +}; + +function createBaseExtensionRangeOptions(): ExtensionRangeOptions { + return { uninterpretedOption: [] }; +} + +export const ExtensionRangeOptions = { + encode(message: ExtensionRangeOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ExtensionRangeOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ExtensionRangeOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} + +export const FieldDescriptorProto = { + encode(message: FieldDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(24).int32(message.number); + } + if (message.label !== 1) { + writer.uint32(32).int32(message.label); + } + if (message.type !== 1) { + writer.uint32(40).int32(message.type); + } + if (message.typeName !== "") { + writer.uint32(50).string(message.typeName); + } + if (message.extendee !== "") { + writer.uint32(18).string(message.extendee); + } + if (message.defaultValue !== "") { + writer.uint32(58).string(message.defaultValue); + } + if (message.oneofIndex !== 0) { + writer.uint32(72).int32(message.oneofIndex); + } + if (message.jsonName !== "") { + writer.uint32(82).string(message.jsonName); + } + if (message.options !== undefined) { + FieldOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.proto3Optional === true) { + writer.uint32(136).bool(message.proto3Optional); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 3: + message.number = reader.int32(); + break; + case 4: + message.label = reader.int32() as any; + break; + case 5: + message.type = reader.int32() as any; + break; + case 6: + message.typeName = reader.string(); + break; + case 2: + message.extendee = reader.string(); + break; + case 7: + message.defaultValue = reader.string(); + break; + case 9: + message.oneofIndex = reader.int32(); + break; + case 10: + message.jsonName = reader.string(); + break; + case 8: + message.options = FieldOptions.decode(reader, reader.uint32()); + break; + case 17: + message.proto3Optional = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) ? String(object.typeName) : "", + extendee: isSet(object.extendee) ? String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "", + oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? String(object.jsonName) : "", + options: isSet(object.options) ? FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) ? 
Boolean(object.proto3Optional) : false, + }; + }, + + toJSON(message: FieldDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label)); + message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type)); + message.typeName !== undefined && (obj.typeName = message.typeName); + message.extendee !== undefined && (obj.extendee = message.extendee); + message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue); + message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex)); + message.jsonName !== undefined && (obj.jsonName = message.jsonName); + message.options !== undefined && (obj.options = message.options ? FieldOptions.toJSON(message.options) : undefined); + message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional); + return obj; + }, + + fromPartial, I>>(object: I): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.label = object.label ?? 1; + message.type = object.type ?? 1; + message.typeName = object.typeName ?? ""; + message.extendee = object.extendee ?? ""; + message.defaultValue = object.defaultValue ?? ""; + message.oneofIndex = object.oneofIndex ?? 0; + message.jsonName = object.jsonName ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? FieldOptions.fromPartial(object.options) + : undefined; + message.proto3Optional = object.proto3Optional ?? 
false; + return message; + }, +}; + +function createBaseOneofDescriptorProto(): OneofDescriptorProto { + return { name: "", options: undefined }; +} + +export const OneofDescriptorProto = { + encode(message: OneofDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.options !== undefined) { + OneofOptions.encode(message.options, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.options = OneofOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + options: isSet(object.options) ? OneofOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: OneofDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.options !== undefined && (obj.options = message.options ? OneofOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto(); + message.name = object.name ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? 
OneofOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseEnumDescriptorProto(): EnumDescriptorProto { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] }; +} + +export const EnumDescriptorProto = { + encode(message: EnumDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.value) { + EnumValueDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + EnumOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.reservedRange) { + EnumDescriptorProto_EnumReservedRange.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(42).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.value.push(EnumValueDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = EnumOptions.decode(reader, reader.uint32()); + break; + case 4: + message.reservedRange.push(EnumDescriptorProto_EnumReservedRange.decode(reader, reader.uint32())); + break; + case 5: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + value: Array.isArray(object?.value) ? 
object.value.map((e: any) => EnumValueDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? EnumOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: EnumDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.value) { + obj.value = message.value.map((e) => e ? EnumValueDescriptorProto.toJSON(e) : undefined); + } else { + obj.value = []; + } + message.options !== undefined && (obj.options = message.options ? EnumOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => + e ? EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined + ); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto(); + message.name = object.name ?? ""; + message.value = object.value?.map((e) => EnumValueDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
EnumOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => EnumDescriptorProto_EnumReservedRange.fromPartial(e)) + || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseEnumDescriptorProto_EnumReservedRange(): EnumDescriptorProto_EnumReservedRange { + return { start: 0, end: 0 }; +} + +export const EnumDescriptorProto_EnumReservedRange = { + encode(message: EnumDescriptorProto_EnumReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto_EnumReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto_EnumReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + + toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +function createBaseEnumValueDescriptorProto(): EnumValueDescriptorProto { + return { name: "", number: 0, options: undefined }; +} + +export const EnumValueDescriptorProto = { + encode(message: EnumValueDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(16).int32(message.number); + } + if (message.options !== undefined) { + EnumValueOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.number = reader.int32(); + break; + case 3: + message.options = EnumValueOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + options: isSet(object.options) ? EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: EnumValueDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.options !== undefined + && (obj.options = message.options ? 
EnumValueOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? EnumValueOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseServiceDescriptorProto(): ServiceDescriptorProto { + return { name: "", method: [], options: undefined }; +} + +export const ServiceDescriptorProto = { + encode(message: ServiceDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.method) { + MethodDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + ServiceOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.method.push(MethodDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = ServiceOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + method: Array.isArray(object?.method) ? object.method.map((e: any) => MethodDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? 
ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: ServiceDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.method) { + obj.method = message.method.map((e) => e ? MethodDescriptorProto.toJSON(e) : undefined); + } else { + obj.method = []; + } + message.options !== undefined + && (obj.options = message.options ? ServiceOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto(); + message.name = object.name ?? ""; + message.method = object.method?.map((e) => MethodDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? ServiceOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseMethodDescriptorProto(): MethodDescriptorProto { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} + +export const MethodDescriptorProto = { + encode(message: MethodDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.inputType !== "") { + writer.uint32(18).string(message.inputType); + } + if (message.outputType !== "") { + writer.uint32(26).string(message.outputType); + } + if (message.options !== undefined) { + MethodOptions.encode(message.options, writer.uint32(34).fork()).ldelim(); + } + if (message.clientStreaming === true) { + writer.uint32(40).bool(message.clientStreaming); + } + if (message.serverStreaming === true) { + writer.uint32(48).bool(message.serverStreaming); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodDescriptorProto { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.inputType = reader.string(); + break; + case 3: + message.outputType = reader.string(); + break; + case 4: + message.options = MethodOptions.decode(reader, reader.uint32()); + break; + case 5: + message.clientStreaming = reader.bool(); + break; + case 6: + message.serverStreaming = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + inputType: isSet(object.inputType) ? String(object.inputType) : "", + outputType: isSet(object.outputType) ? String(object.outputType) : "", + options: isSet(object.options) ? MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false, + serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false, + }; + }, + + toJSON(message: MethodDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.inputType !== undefined && (obj.inputType = message.inputType); + message.outputType !== undefined && (obj.outputType = message.outputType); + message.options !== undefined + && (obj.options = message.options ? MethodOptions.toJSON(message.options) : undefined); + message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming); + message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming); + return obj; + }, + + fromPartial, I>>(object: I): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto(); + message.name = object.name ?? 
""; + message.inputType = object.inputType ?? ""; + message.outputType = object.outputType ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? MethodOptions.fromPartial(object.options) + : undefined; + message.clientStreaming = object.clientStreaming ?? false; + message.serverStreaming = object.serverStreaming ?? false; + return message; + }, +}; + +function createBaseFileOptions(): FileOptions { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + uninterpretedOption: [], + }; +} + +export const FileOptions = { + encode(message: FileOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.javaPackage !== "") { + writer.uint32(10).string(message.javaPackage); + } + if (message.javaOuterClassname !== "") { + writer.uint32(66).string(message.javaOuterClassname); + } + if (message.javaMultipleFiles === true) { + writer.uint32(80).bool(message.javaMultipleFiles); + } + if (message.javaGenerateEqualsAndHash === true) { + writer.uint32(160).bool(message.javaGenerateEqualsAndHash); + } + if (message.javaStringCheckUtf8 === true) { + writer.uint32(216).bool(message.javaStringCheckUtf8); + } + if (message.optimizeFor !== 1) { + writer.uint32(72).int32(message.optimizeFor); + } + if (message.goPackage !== "") { + writer.uint32(90).string(message.goPackage); + } + if (message.ccGenericServices === true) { + writer.uint32(128).bool(message.ccGenericServices); + } + if (message.javaGenericServices === true) { + writer.uint32(136).bool(message.javaGenericServices); + } + if 
(message.pyGenericServices === true) { + writer.uint32(144).bool(message.pyGenericServices); + } + if (message.phpGenericServices === true) { + writer.uint32(336).bool(message.phpGenericServices); + } + if (message.deprecated === true) { + writer.uint32(184).bool(message.deprecated); + } + if (message.ccEnableArenas === true) { + writer.uint32(248).bool(message.ccEnableArenas); + } + if (message.objcClassPrefix !== "") { + writer.uint32(290).string(message.objcClassPrefix); + } + if (message.csharpNamespace !== "") { + writer.uint32(298).string(message.csharpNamespace); + } + if (message.swiftPrefix !== "") { + writer.uint32(314).string(message.swiftPrefix); + } + if (message.phpClassPrefix !== "") { + writer.uint32(322).string(message.phpClassPrefix); + } + if (message.phpNamespace !== "") { + writer.uint32(330).string(message.phpNamespace); + } + if (message.phpMetadataNamespace !== "") { + writer.uint32(354).string(message.phpMetadataNamespace); + } + if (message.rubyPackage !== "") { + writer.uint32(362).string(message.rubyPackage); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.javaPackage = reader.string(); + break; + case 8: + message.javaOuterClassname = reader.string(); + break; + case 10: + message.javaMultipleFiles = reader.bool(); + break; + case 20: + message.javaGenerateEqualsAndHash = reader.bool(); + break; + case 27: + message.javaStringCheckUtf8 = reader.bool(); + break; + case 9: + message.optimizeFor = reader.int32() as any; + break; + case 11: + message.goPackage = reader.string(); + break; + case 16: + message.ccGenericServices = reader.bool(); + break; + case 17: + message.javaGenericServices = reader.bool(); + break; + case 18: + message.pyGenericServices = reader.bool(); + break; + case 42: + message.phpGenericServices = reader.bool(); + break; + case 23: + message.deprecated = reader.bool(); + break; + case 31: + message.ccEnableArenas = reader.bool(); + break; + case 36: + message.objcClassPrefix = reader.string(); + break; + case 37: + message.csharpNamespace = reader.string(); + break; + case 39: + message.swiftPrefix = reader.string(); + break; + case 40: + message.phpClassPrefix = reader.string(); + break; + case 41: + message.phpNamespace = reader.string(); + break; + case 44: + message.phpMetadataNamespace = reader.string(); + break; + case 45: + message.rubyPackage = reader.string(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileOptions { + return { + javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "", + javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) ? 
Boolean(object.javaMultipleFiles) : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false, + optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, + goPackage: isSet(object.goPackage) ? String(object.goPackage) : "", + ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false, + javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false, + pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false, + phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false, + objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "", + csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "", + swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "", + phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "", + phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "", + rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "", + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FileOptions): unknown { + const obj: any = {}; + message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage); + message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname); + message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles); + message.javaGenerateEqualsAndHash !== undefined + && (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash); + message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8); + message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor)); + message.goPackage !== undefined && (obj.goPackage = message.goPackage); + message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices); + message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices); + message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices); + message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas); + message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix); + message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace); + message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix); + message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix); + message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace); + message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace); + message.rubyPackage !== undefined 
&& (obj.rubyPackage = message.rubyPackage); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileOptions { + const message = createBaseFileOptions(); + message.javaPackage = object.javaPackage ?? ""; + message.javaOuterClassname = object.javaOuterClassname ?? ""; + message.javaMultipleFiles = object.javaMultipleFiles ?? false; + message.javaGenerateEqualsAndHash = object.javaGenerateEqualsAndHash ?? false; + message.javaStringCheckUtf8 = object.javaStringCheckUtf8 ?? false; + message.optimizeFor = object.optimizeFor ?? 1; + message.goPackage = object.goPackage ?? ""; + message.ccGenericServices = object.ccGenericServices ?? false; + message.javaGenericServices = object.javaGenericServices ?? false; + message.pyGenericServices = object.pyGenericServices ?? false; + message.phpGenericServices = object.phpGenericServices ?? false; + message.deprecated = object.deprecated ?? false; + message.ccEnableArenas = object.ccEnableArenas ?? false; + message.objcClassPrefix = object.objcClassPrefix ?? ""; + message.csharpNamespace = object.csharpNamespace ?? ""; + message.swiftPrefix = object.swiftPrefix ?? ""; + message.phpClassPrefix = object.phpClassPrefix ?? ""; + message.phpNamespace = object.phpNamespace ?? ""; + message.phpMetadataNamespace = object.phpMetadataNamespace ?? ""; + message.rubyPackage = object.rubyPackage ?? 
""; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMessageOptions(): MessageOptions { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + uninterpretedOption: [], + }; +} + +export const MessageOptions = { + encode(message: MessageOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.messageSetWireFormat === true) { + writer.uint32(8).bool(message.messageSetWireFormat); + } + if (message.noStandardDescriptorAccessor === true) { + writer.uint32(16).bool(message.noStandardDescriptorAccessor); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.mapEntry === true) { + writer.uint32(56).bool(message.mapEntry); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MessageOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMessageOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.messageSetWireFormat = reader.bool(); + break; + case 2: + message.noStandardDescriptorAccessor = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 7: + message.mapEntry = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MessageOptions { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) ? 
Boolean(object.messageSetWireFormat) : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MessageOptions): unknown { + const obj: any = {}; + message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat); + message.noStandardDescriptorAccessor !== undefined + && (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MessageOptions { + const message = createBaseMessageOptions(); + message.messageSetWireFormat = object.messageSetWireFormat ?? false; + message.noStandardDescriptorAccessor = object.noStandardDescriptorAccessor ?? false; + message.deprecated = object.deprecated ?? false; + message.mapEntry = object.mapEntry ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldOptions(): FieldOptions { + return { ctype: 0, packed: false, jstype: 0, lazy: false, deprecated: false, weak: false, uninterpretedOption: [] }; +} + +export const FieldOptions = { + encode(message: FieldOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ctype !== 0) { + writer.uint32(8).int32(message.ctype); + } + if (message.packed === true) { + writer.uint32(16).bool(message.packed); + } + if (message.jstype !== 0) { + writer.uint32(48).int32(message.jstype); + } + if (message.lazy === true) { + writer.uint32(40).bool(message.lazy); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.weak === true) { + writer.uint32(80).bool(message.weak); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.ctype = reader.int32() as any; + break; + case 2: + message.packed = reader.bool(); + break; + case 6: + message.jstype = reader.int32() as any; + break; + case 5: + message.lazy = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 10: + message.weak = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldOptions { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? Boolean(object.lazy) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? Boolean(object.weak) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FieldOptions): unknown { + const obj: any = {}; + message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype)); + message.packed !== undefined && (obj.packed = message.packed); + message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype)); + message.lazy !== undefined && (obj.lazy = message.lazy); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.weak !== undefined && (obj.weak = message.weak); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FieldOptions { + const message = createBaseFieldOptions(); + message.ctype = object.ctype ?? 0; + message.packed = object.packed ?? false; + message.jstype = object.jstype ?? 0; + message.lazy = object.lazy ?? false; + message.deprecated = object.deprecated ?? false; + message.weak = object.weak ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseOneofOptions(): OneofOptions { + return { uninterpretedOption: [] }; +} + +export const OneofOptions = { + encode(message: OneofOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: OneofOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): OneofOptions { + const message = createBaseOneofOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumOptions(): EnumOptions { + return { allowAlias: false, deprecated: false, uninterpretedOption: [] }; +} + +export const EnumOptions = { + encode(message: EnumOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.allowAlias === true) { + writer.uint32(16).bool(message.allowAlias); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.allowAlias = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumOptions { + return { + allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumOptions): unknown { + const obj: any = {}; + message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumOptions { + const message = createBaseEnumOptions(); + message.allowAlias = object.allowAlias ?? false; + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumValueOptions(): EnumValueOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const EnumValueOptions = { + encode(message: EnumValueOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(8).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumValueOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumValueOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumValueOptions { + const message = createBaseEnumValueOptions(); + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseServiceOptions(): ServiceOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const ServiceOptions = { + encode(message: ServiceOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceOptions { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ServiceOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ServiceOptions { + const message = createBaseServiceOptions(); + message.deprecated = object.deprecated ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMethodOptions(): MethodOptions { + return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] }; +} + +export const MethodOptions = { + encode(message: MethodOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + if (message.idempotencyLevel !== 0) { + writer.uint32(272).int32(message.idempotencyLevel); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 34: + message.idempotencyLevel = reader.int32() as any; + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : 0, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MethodOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.idempotencyLevel !== undefined + && (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel)); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MethodOptions { + const message = createBaseMethodOptions(); + message.deprecated = object.deprecated ?? false; + message.idempotencyLevel = object.idempotencyLevel ?? 0; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseUninterpretedOption(): UninterpretedOption { + return { + name: [], + identifierValue: "", + positiveIntValue: 0, + negativeIntValue: 0, + doubleValue: 0, + stringValue: new Uint8Array(), + aggregateValue: "", + }; +} + +export const UninterpretedOption = { + encode(message: UninterpretedOption, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.name) { + UninterpretedOption_NamePart.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.identifierValue !== "") { + writer.uint32(26).string(message.identifierValue); + } + if (message.positiveIntValue !== 0) { + writer.uint32(32).uint64(message.positiveIntValue); + } + if (message.negativeIntValue !== 0) { + writer.uint32(40).int64(message.negativeIntValue); + } + if (message.doubleValue !== 0) { + writer.uint32(49).double(message.doubleValue); + } + if (message.stringValue.length !== 0) { + writer.uint32(58).bytes(message.stringValue); + } + if (message.aggregateValue !== "") { + writer.uint32(66).string(message.aggregateValue); + } + 
return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.name.push(UninterpretedOption_NamePart.decode(reader, reader.uint32())); + break; + case 3: + message.identifierValue = reader.string(); + break; + case 4: + message.positiveIntValue = longToNumber(reader.uint64() as Long); + break; + case 5: + message.negativeIntValue = longToNumber(reader.int64() as Long); + break; + case 6: + message.doubleValue = reader.double(); + break; + case 7: + message.stringValue = reader.bytes(); + break; + case 8: + message.aggregateValue = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption { + return { + name: Array.isArray(object?.name) ? object.name.map((e: any) => UninterpretedOption_NamePart.fromJSON(e)) : [], + identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "", + positiveIntValue: isSet(object.positiveIntValue) ? Number(object.positiveIntValue) : 0, + negativeIntValue: isSet(object.negativeIntValue) ? Number(object.negativeIntValue) : 0, + doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) ? bytesFromBase64(object.stringValue) : new Uint8Array(), + aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "", + }; + }, + + toJSON(message: UninterpretedOption): unknown { + const obj: any = {}; + if (message.name) { + obj.name = message.name.map((e) => e ? 
UninterpretedOption_NamePart.toJSON(e) : undefined); + } else { + obj.name = []; + } + message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue); + message.positiveIntValue !== undefined && (obj.positiveIntValue = Math.round(message.positiveIntValue)); + message.negativeIntValue !== undefined && (obj.negativeIntValue = Math.round(message.negativeIntValue)); + message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue); + message.stringValue !== undefined + && (obj.stringValue = base64FromBytes( + message.stringValue !== undefined ? message.stringValue : new Uint8Array(), + )); + message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption { + const message = createBaseUninterpretedOption(); + message.name = object.name?.map((e) => UninterpretedOption_NamePart.fromPartial(e)) || []; + message.identifierValue = object.identifierValue ?? ""; + message.positiveIntValue = object.positiveIntValue ?? 0; + message.negativeIntValue = object.negativeIntValue ?? 0; + message.doubleValue = object.doubleValue ?? 0; + message.stringValue = object.stringValue ?? new Uint8Array(); + message.aggregateValue = object.aggregateValue ?? ""; + return message; + }, +}; + +function createBaseUninterpretedOption_NamePart(): UninterpretedOption_NamePart { + return { namePart: "", isExtension: false }; +} + +export const UninterpretedOption_NamePart = { + encode(message: UninterpretedOption_NamePart, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.namePart !== "") { + writer.uint32(10).string(message.namePart); + } + if (message.isExtension === true) { + writer.uint32(16).bool(message.isExtension); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption_NamePart { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseUninterpretedOption_NamePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.namePart = reader.string(); + break; + case 2: + message.isExtension = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption_NamePart { + return { + namePart: isSet(object.namePart) ? String(object.namePart) : "", + isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false, + }; + }, + + toJSON(message: UninterpretedOption_NamePart): unknown { + const obj: any = {}; + message.namePart !== undefined && (obj.namePart = message.namePart); + message.isExtension !== undefined && (obj.isExtension = message.isExtension); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart(); + message.namePart = object.namePart ?? ""; + message.isExtension = object.isExtension ?? false; + return message; + }, +}; + +function createBaseSourceCodeInfo(): SourceCodeInfo { + return { location: [] }; +} + +export const SourceCodeInfo = { + encode(message: SourceCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.location) { + SourceCodeInfo_Location.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.location.push(SourceCodeInfo_Location.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo { + return { + location: Array.isArray(object?.location) + ? object.location.map((e: any) => SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo): unknown { + const obj: any = {}; + if (message.location) { + obj.location = message.location.map((e) => e ? SourceCodeInfo_Location.toJSON(e) : undefined); + } else { + obj.location = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo { + const message = createBaseSourceCodeInfo(); + message.location = object.location?.map((e) => SourceCodeInfo_Location.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSourceCodeInfo_Location(): SourceCodeInfo_Location { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} + +export const SourceCodeInfo_Location = { + encode(message: SourceCodeInfo_Location, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(18).fork(); + for (const v of message.span) { + writer.int32(v); + } + writer.ldelim(); + if (message.leadingComments !== "") { + writer.uint32(26).string(message.leadingComments); + } + if (message.trailingComments !== "") { + writer.uint32(34).string(message.trailingComments); + } + for (const v of message.leadingDetachedComments) { + writer.uint32(50).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo_Location { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo_Location(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.span.push(reader.int32()); + } + } else { + message.span.push(reader.int32()); + } + break; + case 3: + message.leadingComments = reader.string(); + break; + case 4: + message.trailingComments = reader.string(); + break; + case 6: + message.leadingDetachedComments.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo_Location { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + span: Array.isArray(object?.span) ? object.span.map((e: any) => Number(e)) : [], + leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "", + trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "", + leadingDetachedComments: Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e: any) => String(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo_Location): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + if (message.span) { + obj.span = message.span.map((e) => Math.round(e)); + } else { + obj.span = []; + } + message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments); + message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments); + if (message.leadingDetachedComments) { + obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e); + } else { + obj.leadingDetachedComments = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location(); + message.path = object.path?.map((e) => e) || []; + message.span = object.span?.map((e) => e) || []; + message.leadingComments = object.leadingComments ?? ""; + message.trailingComments = object.trailingComments ?? ""; + message.leadingDetachedComments = object.leadingDetachedComments?.map((e) => e) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo(): GeneratedCodeInfo { + return { annotation: [] }; +} + +export const GeneratedCodeInfo = { + encode(message: GeneratedCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.annotation) { + GeneratedCodeInfo_Annotation.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.annotation.push(GeneratedCodeInfo_Annotation.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo { + return { + annotation: Array.isArray(object?.annotation) + ? object.annotation.map((e: any) => GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GeneratedCodeInfo): unknown { + const obj: any = {}; + if (message.annotation) { + obj.annotation = message.annotation.map((e) => e ? GeneratedCodeInfo_Annotation.toJSON(e) : undefined); + } else { + obj.annotation = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo(); + message.annotation = object.annotation?.map((e) => GeneratedCodeInfo_Annotation.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo_Annotation(): GeneratedCodeInfo_Annotation { + return { path: [], sourceFile: "", begin: 0, end: 0 }; +} + +export const GeneratedCodeInfo_Annotation = { + encode(message: GeneratedCodeInfo_Annotation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + if (message.sourceFile !== "") { + writer.uint32(18).string(message.sourceFile); + } + if (message.begin !== 0) { + writer.uint32(24).int32(message.begin); + } + if (message.end !== 0) { + writer.uint32(32).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo_Annotation { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo_Annotation(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + message.sourceFile = reader.string(); + break; + case 3: + message.begin = reader.int32(); + break; + case 4: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo_Annotation { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "", + begin: isSet(object.begin) ? Number(object.begin) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + }; + }, + + toJSON(message: GeneratedCodeInfo_Annotation): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile); + message.begin !== undefined && (obj.begin = Math.round(message.begin)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation(); + message.path = object.path?.map((e) => e) || []; + message.sourceFile = object.sourceFile ?? ""; + message.begin = object.begin ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/genesis.ts b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/genesis.ts new file mode 100644 index 000000000..ef0830cec --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/genesis.ts @@ -0,0 +1,126 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Model } from "./model"; +import { ModelVersion } from "./model_version"; +import { ModelVersions } from "./model_versions"; +import { VendorProducts } from "./vendor_products"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.model"; + +/** GenesisState defines the model module's genesis state. 
*/ +export interface GenesisState { + vendorProductsList: VendorProducts[]; + modelList: Model[]; + modelVersionList: ModelVersion[]; + /** this line is used by starport scaffolding # genesis/proto/state */ + modelVersionsList: ModelVersions[]; +} + +function createBaseGenesisState(): GenesisState { + return { vendorProductsList: [], modelList: [], modelVersionList: [], modelVersionsList: [] }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.vendorProductsList) { + VendorProducts.encode(v!, writer.uint32(10).fork()).ldelim(); + } + for (const v of message.modelList) { + Model.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.modelVersionList) { + ModelVersion.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.modelVersionsList) { + ModelVersions.encode(v!, writer.uint32(34).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenesisState(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vendorProductsList.push(VendorProducts.decode(reader, reader.uint32())); + break; + case 2: + message.modelList.push(Model.decode(reader, reader.uint32())); + break; + case 3: + message.modelVersionList.push(ModelVersion.decode(reader, reader.uint32())); + break; + case 4: + message.modelVersionsList.push(ModelVersions.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GenesisState { + return { + vendorProductsList: Array.isArray(object?.vendorProductsList) + ? 
object.vendorProductsList.map((e: any) => VendorProducts.fromJSON(e)) + : [], + modelList: Array.isArray(object?.modelList) ? object.modelList.map((e: any) => Model.fromJSON(e)) : [], + modelVersionList: Array.isArray(object?.modelVersionList) + ? object.modelVersionList.map((e: any) => ModelVersion.fromJSON(e)) + : [], + modelVersionsList: Array.isArray(object?.modelVersionsList) + ? object.modelVersionsList.map((e: any) => ModelVersions.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GenesisState): unknown { + const obj: any = {}; + if (message.vendorProductsList) { + obj.vendorProductsList = message.vendorProductsList.map((e) => e ? VendorProducts.toJSON(e) : undefined); + } else { + obj.vendorProductsList = []; + } + if (message.modelList) { + obj.modelList = message.modelList.map((e) => e ? Model.toJSON(e) : undefined); + } else { + obj.modelList = []; + } + if (message.modelVersionList) { + obj.modelVersionList = message.modelVersionList.map((e) => e ? ModelVersion.toJSON(e) : undefined); + } else { + obj.modelVersionList = []; + } + if (message.modelVersionsList) { + obj.modelVersionsList = message.modelVersionsList.map((e) => e ? ModelVersions.toJSON(e) : undefined); + } else { + obj.modelVersionsList = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GenesisState { + const message = createBaseGenesisState(); + message.vendorProductsList = object.vendorProductsList?.map((e) => VendorProducts.fromPartial(e)) || []; + message.modelList = object.modelList?.map((e) => Model.fromPartial(e)) || []; + message.modelVersionList = object.modelVersionList?.map((e) => ModelVersion.fromPartial(e)) || []; + message.modelVersionsList = object.modelVersionsList?.map((e) => ModelVersions.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? 
ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/model.ts b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/model.ts new file mode 100644 index 000000000..cfe021f18 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/model.ts @@ -0,0 +1,302 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.model"; + +export interface Model { + vid: number; + pid: number; + deviceTypeId: number; + productName: string; + productLabel: string; + partNumber: string; + commissioningCustomFlow: number; + commissioningCustomFlowUrl: string; + commissioningModeInitialStepsHint: number; + commissioningModeInitialStepsInstruction: string; + commissioningModeSecondaryStepsHint: number; + commissioningModeSecondaryStepsInstruction: string; + userManualUrl: string; + supportUrl: string; + productUrl: string; + lsfUrl: string; + lsfRevision: number; + creator: string; + schemaVersion: number; + commissionerRemoteUiFlowUrl: string; +} + +function createBaseModel(): Model { + return { + vid: 0, + pid: 0, + deviceTypeId: 0, + productName: "", + productLabel: "", + partNumber: "", + commissioningCustomFlow: 0, + commissioningCustomFlowUrl: "", + commissioningModeInitialStepsHint: 0, + commissioningModeInitialStepsInstruction: "", + commissioningModeSecondaryStepsHint: 0, + commissioningModeSecondaryStepsInstruction: "", + userManualUrl: "", + supportUrl: "", + productUrl: "", + lsfUrl: "", + lsfRevision: 0, + creator: "", + schemaVersion: 0, + 
commissionerRemoteUiFlowUrl: "", + }; +} + +export const Model = { + encode(message: Model, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(16).int32(message.pid); + } + if (message.deviceTypeId !== 0) { + writer.uint32(24).int32(message.deviceTypeId); + } + if (message.productName !== "") { + writer.uint32(34).string(message.productName); + } + if (message.productLabel !== "") { + writer.uint32(42).string(message.productLabel); + } + if (message.partNumber !== "") { + writer.uint32(50).string(message.partNumber); + } + if (message.commissioningCustomFlow !== 0) { + writer.uint32(56).int32(message.commissioningCustomFlow); + } + if (message.commissioningCustomFlowUrl !== "") { + writer.uint32(66).string(message.commissioningCustomFlowUrl); + } + if (message.commissioningModeInitialStepsHint !== 0) { + writer.uint32(72).uint32(message.commissioningModeInitialStepsHint); + } + if (message.commissioningModeInitialStepsInstruction !== "") { + writer.uint32(82).string(message.commissioningModeInitialStepsInstruction); + } + if (message.commissioningModeSecondaryStepsHint !== 0) { + writer.uint32(88).uint32(message.commissioningModeSecondaryStepsHint); + } + if (message.commissioningModeSecondaryStepsInstruction !== "") { + writer.uint32(98).string(message.commissioningModeSecondaryStepsInstruction); + } + if (message.userManualUrl !== "") { + writer.uint32(106).string(message.userManualUrl); + } + if (message.supportUrl !== "") { + writer.uint32(114).string(message.supportUrl); + } + if (message.productUrl !== "") { + writer.uint32(122).string(message.productUrl); + } + if (message.lsfUrl !== "") { + writer.uint32(130).string(message.lsfUrl); + } + if (message.lsfRevision !== 0) { + writer.uint32(136).int32(message.lsfRevision); + } + if (message.creator !== "") { + writer.uint32(146).string(message.creator); + } + if (message.schemaVersion !== 0) { + 
writer.uint32(152).uint32(message.schemaVersion); + } + if (message.commissionerRemoteUiFlowUrl !== "") { + writer.uint32(162).string(message.commissionerRemoteUiFlowUrl); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Model { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseModel(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + case 2: + message.pid = reader.int32(); + break; + case 3: + message.deviceTypeId = reader.int32(); + break; + case 4: + message.productName = reader.string(); + break; + case 5: + message.productLabel = reader.string(); + break; + case 6: + message.partNumber = reader.string(); + break; + case 7: + message.commissioningCustomFlow = reader.int32(); + break; + case 8: + message.commissioningCustomFlowUrl = reader.string(); + break; + case 9: + message.commissioningModeInitialStepsHint = reader.uint32(); + break; + case 10: + message.commissioningModeInitialStepsInstruction = reader.string(); + break; + case 11: + message.commissioningModeSecondaryStepsHint = reader.uint32(); + break; + case 12: + message.commissioningModeSecondaryStepsInstruction = reader.string(); + break; + case 13: + message.userManualUrl = reader.string(); + break; + case 14: + message.supportUrl = reader.string(); + break; + case 15: + message.productUrl = reader.string(); + break; + case 16: + message.lsfUrl = reader.string(); + break; + case 17: + message.lsfRevision = reader.int32(); + break; + case 18: + message.creator = reader.string(); + break; + case 19: + message.schemaVersion = reader.uint32(); + break; + case 20: + message.commissionerRemoteUiFlowUrl = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Model { + return { + vid: 
isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? Number(object.pid) : 0, + deviceTypeId: isSet(object.deviceTypeId) ? Number(object.deviceTypeId) : 0, + productName: isSet(object.productName) ? String(object.productName) : "", + productLabel: isSet(object.productLabel) ? String(object.productLabel) : "", + partNumber: isSet(object.partNumber) ? String(object.partNumber) : "", + commissioningCustomFlow: isSet(object.commissioningCustomFlow) ? Number(object.commissioningCustomFlow) : 0, + commissioningCustomFlowUrl: isSet(object.commissioningCustomFlowUrl) + ? String(object.commissioningCustomFlowUrl) + : "", + commissioningModeInitialStepsHint: isSet(object.commissioningModeInitialStepsHint) + ? Number(object.commissioningModeInitialStepsHint) + : 0, + commissioningModeInitialStepsInstruction: isSet(object.commissioningModeInitialStepsInstruction) + ? String(object.commissioningModeInitialStepsInstruction) + : "", + commissioningModeSecondaryStepsHint: isSet(object.commissioningModeSecondaryStepsHint) + ? Number(object.commissioningModeSecondaryStepsHint) + : 0, + commissioningModeSecondaryStepsInstruction: isSet(object.commissioningModeSecondaryStepsInstruction) + ? String(object.commissioningModeSecondaryStepsInstruction) + : "", + userManualUrl: isSet(object.userManualUrl) ? String(object.userManualUrl) : "", + supportUrl: isSet(object.supportUrl) ? String(object.supportUrl) : "", + productUrl: isSet(object.productUrl) ? String(object.productUrl) : "", + lsfUrl: isSet(object.lsfUrl) ? String(object.lsfUrl) : "", + lsfRevision: isSet(object.lsfRevision) ? Number(object.lsfRevision) : 0, + creator: isSet(object.creator) ? String(object.creator) : "", + schemaVersion: isSet(object.schemaVersion) ? Number(object.schemaVersion) : 0, + commissionerRemoteUiFlowUrl: isSet(object.commissionerRemoteUiFlowUrl) + ? 
String(object.commissionerRemoteUiFlowUrl) + : "", + }; + }, + + toJSON(message: Model): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.deviceTypeId !== undefined && (obj.deviceTypeId = Math.round(message.deviceTypeId)); + message.productName !== undefined && (obj.productName = message.productName); + message.productLabel !== undefined && (obj.productLabel = message.productLabel); + message.partNumber !== undefined && (obj.partNumber = message.partNumber); + message.commissioningCustomFlow !== undefined + && (obj.commissioningCustomFlow = Math.round(message.commissioningCustomFlow)); + message.commissioningCustomFlowUrl !== undefined + && (obj.commissioningCustomFlowUrl = message.commissioningCustomFlowUrl); + message.commissioningModeInitialStepsHint !== undefined + && (obj.commissioningModeInitialStepsHint = Math.round(message.commissioningModeInitialStepsHint)); + message.commissioningModeInitialStepsInstruction !== undefined + && (obj.commissioningModeInitialStepsInstruction = message.commissioningModeInitialStepsInstruction); + message.commissioningModeSecondaryStepsHint !== undefined + && (obj.commissioningModeSecondaryStepsHint = Math.round(message.commissioningModeSecondaryStepsHint)); + message.commissioningModeSecondaryStepsInstruction !== undefined + && (obj.commissioningModeSecondaryStepsInstruction = message.commissioningModeSecondaryStepsInstruction); + message.userManualUrl !== undefined && (obj.userManualUrl = message.userManualUrl); + message.supportUrl !== undefined && (obj.supportUrl = message.supportUrl); + message.productUrl !== undefined && (obj.productUrl = message.productUrl); + message.lsfUrl !== undefined && (obj.lsfUrl = message.lsfUrl); + message.lsfRevision !== undefined && (obj.lsfRevision = Math.round(message.lsfRevision)); + message.creator !== undefined && (obj.creator = message.creator); + 
message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + message.commissionerRemoteUiFlowUrl !== undefined + && (obj.commissionerRemoteUiFlowUrl = message.commissionerRemoteUiFlowUrl); + return obj; + }, + + fromPartial, I>>(object: I): Model { + const message = createBaseModel(); + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + message.deviceTypeId = object.deviceTypeId ?? 0; + message.productName = object.productName ?? ""; + message.productLabel = object.productLabel ?? ""; + message.partNumber = object.partNumber ?? ""; + message.commissioningCustomFlow = object.commissioningCustomFlow ?? 0; + message.commissioningCustomFlowUrl = object.commissioningCustomFlowUrl ?? ""; + message.commissioningModeInitialStepsHint = object.commissioningModeInitialStepsHint ?? 0; + message.commissioningModeInitialStepsInstruction = object.commissioningModeInitialStepsInstruction ?? ""; + message.commissioningModeSecondaryStepsHint = object.commissioningModeSecondaryStepsHint ?? 0; + message.commissioningModeSecondaryStepsInstruction = object.commissioningModeSecondaryStepsInstruction ?? ""; + message.userManualUrl = object.userManualUrl ?? ""; + message.supportUrl = object.supportUrl ?? ""; + message.productUrl = object.productUrl ?? ""; + message.lsfUrl = object.lsfUrl ?? ""; + message.lsfRevision = object.lsfRevision ?? 0; + message.creator = object.creator ?? ""; + message.schemaVersion = object.schemaVersion ?? 0; + message.commissionerRemoteUiFlowUrl = object.commissionerRemoteUiFlowUrl ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/model_version.ts b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/model_version.ts new file mode 100644 index 000000000..4554f6efa --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/model_version.ts @@ -0,0 +1,277 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.model"; + +export interface ModelVersion { + vid: number; + pid: number; + softwareVersion: number; + softwareVersionString: string; + cdVersionNumber: number; + firmwareInformation: string; + softwareVersionValid: boolean; + otaUrl: string; + otaFileSize: number; + otaChecksum: string; + otaChecksumType: number; + minApplicableSoftwareVersion: number; + maxApplicableSoftwareVersion: number; + releaseNotesUrl: string; + creator: string; + schemaVersion: number; +} + +function createBaseModelVersion(): ModelVersion { + return { + vid: 0, + pid: 0, + softwareVersion: 0, + softwareVersionString: "", + cdVersionNumber: 0, + firmwareInformation: "", + softwareVersionValid: false, + otaUrl: "", + otaFileSize: 0, + otaChecksum: "", + otaChecksumType: 0, + minApplicableSoftwareVersion: 0, + maxApplicableSoftwareVersion: 0, + releaseNotesUrl: "", + creator: "", + schemaVersion: 0, + }; +} + +export const ModelVersion = { + encode(message: ModelVersion, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(16).int32(message.pid); + } + if 
(message.softwareVersion !== 0) { + writer.uint32(24).uint32(message.softwareVersion); + } + if (message.softwareVersionString !== "") { + writer.uint32(34).string(message.softwareVersionString); + } + if (message.cdVersionNumber !== 0) { + writer.uint32(40).int32(message.cdVersionNumber); + } + if (message.firmwareInformation !== "") { + writer.uint32(50).string(message.firmwareInformation); + } + if (message.softwareVersionValid === true) { + writer.uint32(56).bool(message.softwareVersionValid); + } + if (message.otaUrl !== "") { + writer.uint32(66).string(message.otaUrl); + } + if (message.otaFileSize !== 0) { + writer.uint32(72).uint64(message.otaFileSize); + } + if (message.otaChecksum !== "") { + writer.uint32(82).string(message.otaChecksum); + } + if (message.otaChecksumType !== 0) { + writer.uint32(88).int32(message.otaChecksumType); + } + if (message.minApplicableSoftwareVersion !== 0) { + writer.uint32(96).uint32(message.minApplicableSoftwareVersion); + } + if (message.maxApplicableSoftwareVersion !== 0) { + writer.uint32(104).uint32(message.maxApplicableSoftwareVersion); + } + if (message.releaseNotesUrl !== "") { + writer.uint32(114).string(message.releaseNotesUrl); + } + if (message.creator !== "") { + writer.uint32(122).string(message.creator); + } + if (message.schemaVersion !== 0) { + writer.uint32(128).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ModelVersion { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseModelVersion(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + case 2: + message.pid = reader.int32(); + break; + case 3: + message.softwareVersion = reader.uint32(); + break; + case 4: + message.softwareVersionString = reader.string(); + break; + case 5: + message.cdVersionNumber = reader.int32(); + break; + case 6: + message.firmwareInformation = reader.string(); + break; + case 7: + message.softwareVersionValid = reader.bool(); + break; + case 8: + message.otaUrl = reader.string(); + break; + case 9: + message.otaFileSize = longToNumber(reader.uint64() as Long); + break; + case 10: + message.otaChecksum = reader.string(); + break; + case 11: + message.otaChecksumType = reader.int32(); + break; + case 12: + message.minApplicableSoftwareVersion = reader.uint32(); + break; + case 13: + message.maxApplicableSoftwareVersion = reader.uint32(); + break; + case 14: + message.releaseNotesUrl = reader.string(); + break; + case 15: + message.creator = reader.string(); + break; + case 16: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ModelVersion { + return { + vid: isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? Number(object.pid) : 0, + softwareVersion: isSet(object.softwareVersion) ? Number(object.softwareVersion) : 0, + softwareVersionString: isSet(object.softwareVersionString) ? String(object.softwareVersionString) : "", + cdVersionNumber: isSet(object.cdVersionNumber) ? Number(object.cdVersionNumber) : 0, + firmwareInformation: isSet(object.firmwareInformation) ? String(object.firmwareInformation) : "", + softwareVersionValid: isSet(object.softwareVersionValid) ? Boolean(object.softwareVersionValid) : false, + otaUrl: isSet(object.otaUrl) ? 
String(object.otaUrl) : "", + otaFileSize: isSet(object.otaFileSize) ? Number(object.otaFileSize) : 0, + otaChecksum: isSet(object.otaChecksum) ? String(object.otaChecksum) : "", + otaChecksumType: isSet(object.otaChecksumType) ? Number(object.otaChecksumType) : 0, + minApplicableSoftwareVersion: isSet(object.minApplicableSoftwareVersion) + ? Number(object.minApplicableSoftwareVersion) + : 0, + maxApplicableSoftwareVersion: isSet(object.maxApplicableSoftwareVersion) + ? Number(object.maxApplicableSoftwareVersion) + : 0, + releaseNotesUrl: isSet(object.releaseNotesUrl) ? String(object.releaseNotesUrl) : "", + creator: isSet(object.creator) ? String(object.creator) : "", + schemaVersion: isSet(object.schemaVersion) ? Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: ModelVersion): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.softwareVersion !== undefined && (obj.softwareVersion = Math.round(message.softwareVersion)); + message.softwareVersionString !== undefined && (obj.softwareVersionString = message.softwareVersionString); + message.cdVersionNumber !== undefined && (obj.cdVersionNumber = Math.round(message.cdVersionNumber)); + message.firmwareInformation !== undefined && (obj.firmwareInformation = message.firmwareInformation); + message.softwareVersionValid !== undefined && (obj.softwareVersionValid = message.softwareVersionValid); + message.otaUrl !== undefined && (obj.otaUrl = message.otaUrl); + message.otaFileSize !== undefined && (obj.otaFileSize = Math.round(message.otaFileSize)); + message.otaChecksum !== undefined && (obj.otaChecksum = message.otaChecksum); + message.otaChecksumType !== undefined && (obj.otaChecksumType = Math.round(message.otaChecksumType)); + message.minApplicableSoftwareVersion !== undefined + && (obj.minApplicableSoftwareVersion = Math.round(message.minApplicableSoftwareVersion)); + 
message.maxApplicableSoftwareVersion !== undefined + && (obj.maxApplicableSoftwareVersion = Math.round(message.maxApplicableSoftwareVersion)); + message.releaseNotesUrl !== undefined && (obj.releaseNotesUrl = message.releaseNotesUrl); + message.creator !== undefined && (obj.creator = message.creator); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): ModelVersion { + const message = createBaseModelVersion(); + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + message.softwareVersion = object.softwareVersion ?? 0; + message.softwareVersionString = object.softwareVersionString ?? ""; + message.cdVersionNumber = object.cdVersionNumber ?? 0; + message.firmwareInformation = object.firmwareInformation ?? ""; + message.softwareVersionValid = object.softwareVersionValid ?? false; + message.otaUrl = object.otaUrl ?? ""; + message.otaFileSize = object.otaFileSize ?? 0; + message.otaChecksum = object.otaChecksum ?? ""; + message.otaChecksumType = object.otaChecksumType ?? 0; + message.minApplicableSoftwareVersion = object.minApplicableSoftwareVersion ?? 0; + message.maxApplicableSoftwareVersion = object.maxApplicableSoftwareVersion ?? 0; + message.releaseNotesUrl = object.releaseNotesUrl ?? ""; + message.creator = object.creator ?? ""; + message.schemaVersion = object.schemaVersion ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/model_versions.ts b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/model_versions.ts new file mode 100644 index 000000000..a0ed248de --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/model_versions.ts @@ -0,0 +1,107 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.model"; + +export interface ModelVersions { + vid: number; + pid: number; + 
softwareVersions: number[]; +} + +function createBaseModelVersions(): ModelVersions { + return { vid: 0, pid: 0, softwareVersions: [] }; +} + +export const ModelVersions = { + encode(message: ModelVersions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(16).int32(message.pid); + } + writer.uint32(26).fork(); + for (const v of message.softwareVersions) { + writer.uint32(v); + } + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ModelVersions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseModelVersions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + case 2: + message.pid = reader.int32(); + break; + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.softwareVersions.push(reader.uint32()); + } + } else { + message.softwareVersions.push(reader.uint32()); + } + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ModelVersions { + return { + vid: isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? Number(object.pid) : 0, + softwareVersions: Array.isArray(object?.softwareVersions) + ? 
object.softwareVersions.map((e: any) => Number(e)) + : [], + }; + }, + + toJSON(message: ModelVersions): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + if (message.softwareVersions) { + obj.softwareVersions = message.softwareVersions.map((e) => Math.round(e)); + } else { + obj.softwareVersions = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ModelVersions { + const message = createBaseModelVersions(); + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + message.softwareVersions = object.softwareVersions?.map((e) => e) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/product.ts b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/product.ts new file mode 100644 index 000000000..28b33ea0d --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/product.ts @@ -0,0 +1,92 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.model"; + +export interface Product { + pid: number; + name: string; + partNumber: string; +} + +function createBaseProduct(): Product { + return { pid: 0, name: "", partNumber: "" }; +} + +export const Product = { + encode(message: Product, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pid !== 0) { + writer.uint32(8).int32(message.pid); + } + if (message.name !== "") { + writer.uint32(18).string(message.name); + } + if (message.partNumber !== "") { + writer.uint32(26).string(message.partNumber); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Product { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseProduct(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pid = reader.int32(); + break; + case 2: + message.name = reader.string(); + break; + case 3: + message.partNumber = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Product { + return { + pid: isSet(object.pid) ? 
Number(object.pid) : 0, + name: isSet(object.name) ? String(object.name) : "", + partNumber: isSet(object.partNumber) ? String(object.partNumber) : "", + }; + }, + + toJSON(message: Product): unknown { + const obj: any = {}; + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.name !== undefined && (obj.name = message.name); + message.partNumber !== undefined && (obj.partNumber = message.partNumber); + return obj; + }, + + fromPartial, I>>(object: I): Product { + const message = createBaseProduct(); + message.pid = object.pid ?? 0; + message.name = object.name ?? ""; + message.partNumber = object.partNumber ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/query.ts b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/query.ts new file mode 100644 index 000000000..7a5e5ef6a --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/query.ts @@ -0,0 +1,672 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { PageRequest, PageResponse } from "../../../cosmos/base/query/v1beta1/pagination"; +import { Model } from "./model"; +import { ModelVersion } from "./model_version"; +import { ModelVersions } from "./model_versions"; +import { VendorProducts } from "./vendor_products"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.model"; + +export interface QueryGetVendorProductsRequest { + vid: number; +} + +export interface QueryGetVendorProductsResponse { + vendorProducts: VendorProducts | undefined; +} + +export interface QueryGetModelRequest { + vid: number; + pid: number; +} + +export interface QueryGetModelResponse { + model: Model | undefined; +} + +export interface QueryAllModelRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllModelResponse { + model: Model[]; + pagination: PageResponse | undefined; +} + +export interface QueryGetModelVersionRequest { + vid: number; + pid: number; + softwareVersion: number; +} + +export interface QueryGetModelVersionResponse { + modelVersion: ModelVersion | undefined; +} + +export interface QueryGetModelVersionsRequest { + vid: number; + pid: number; +} + +export interface QueryGetModelVersionsResponse { + modelVersions: ModelVersions | undefined; +} + +function 
createBaseQueryGetVendorProductsRequest(): QueryGetVendorProductsRequest { + return { vid: 0 }; +} + +export const QueryGetVendorProductsRequest = { + encode(message: QueryGetVendorProductsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetVendorProductsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetVendorProductsRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetVendorProductsRequest { + return { vid: isSet(object.vid) ? Number(object.vid) : 0 }; + }, + + toJSON(message: QueryGetVendorProductsRequest): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetVendorProductsRequest { + const message = createBaseQueryGetVendorProductsRequest(); + message.vid = object.vid ?? 0; + return message; + }, +}; + +function createBaseQueryGetVendorProductsResponse(): QueryGetVendorProductsResponse { + return { vendorProducts: undefined }; +} + +export const QueryGetVendorProductsResponse = { + encode(message: QueryGetVendorProductsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vendorProducts !== undefined) { + VendorProducts.encode(message.vendorProducts, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetVendorProductsResponse { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetVendorProductsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vendorProducts = VendorProducts.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetVendorProductsResponse { + return { + vendorProducts: isSet(object.vendorProducts) ? VendorProducts.fromJSON(object.vendorProducts) : undefined, + }; + }, + + toJSON(message: QueryGetVendorProductsResponse): unknown { + const obj: any = {}; + message.vendorProducts !== undefined + && (obj.vendorProducts = message.vendorProducts ? VendorProducts.toJSON(message.vendorProducts) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetVendorProductsResponse { + const message = createBaseQueryGetVendorProductsResponse(); + message.vendorProducts = (object.vendorProducts !== undefined && object.vendorProducts !== null) + ? VendorProducts.fromPartial(object.vendorProducts) + : undefined; + return message; + }, +}; + +function createBaseQueryGetModelRequest(): QueryGetModelRequest { + return { vid: 0, pid: 0 }; +} + +export const QueryGetModelRequest = { + encode(message: QueryGetModelRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(16).int32(message.pid); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetModelRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGetModelRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + case 2: + message.pid = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetModelRequest { + return { vid: isSet(object.vid) ? Number(object.vid) : 0, pid: isSet(object.pid) ? Number(object.pid) : 0 }; + }, + + toJSON(message: QueryGetModelRequest): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + return obj; + }, + + fromPartial, I>>(object: I): QueryGetModelRequest { + const message = createBaseQueryGetModelRequest(); + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + return message; + }, +}; + +function createBaseQueryGetModelResponse(): QueryGetModelResponse { + return { model: undefined }; +} + +export const QueryGetModelResponse = { + encode(message: QueryGetModelResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.model !== undefined) { + Model.encode(message.model, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetModelResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetModelResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.model = Model.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetModelResponse { + return { model: isSet(object.model) ? 
Model.fromJSON(object.model) : undefined }; + }, + + toJSON(message: QueryGetModelResponse): unknown { + const obj: any = {}; + message.model !== undefined && (obj.model = message.model ? Model.toJSON(message.model) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryGetModelResponse { + const message = createBaseQueryGetModelResponse(); + message.model = (object.model !== undefined && object.model !== null) ? Model.fromPartial(object.model) : undefined; + return message; + }, +}; + +function createBaseQueryAllModelRequest(): QueryAllModelRequest { + return { pagination: undefined }; +} + +export const QueryAllModelRequest = { + encode(message: QueryAllModelRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllModelRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllModelRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllModelRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllModelRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? 
PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryAllModelRequest { + const message = createBaseQueryAllModelRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllModelResponse(): QueryAllModelResponse { + return { model: [], pagination: undefined }; +} + +export const QueryAllModelResponse = { + encode(message: QueryAllModelResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.model) { + Model.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllModelResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllModelResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.model.push(Model.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllModelResponse { + return { + model: Array.isArray(object?.model) ? object.model.map((e: any) => Model.fromJSON(e)) : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllModelResponse): unknown { + const obj: any = {}; + if (message.model) { + obj.model = message.model.map((e) => e ? 
Model.toJSON(e) : undefined); + } else { + obj.model = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryAllModelResponse { + const message = createBaseQueryAllModelResponse(); + message.model = object.model?.map((e) => Model.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetModelVersionRequest(): QueryGetModelVersionRequest { + return { vid: 0, pid: 0, softwareVersion: 0 }; +} + +export const QueryGetModelVersionRequest = { + encode(message: QueryGetModelVersionRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(16).int32(message.pid); + } + if (message.softwareVersion !== 0) { + writer.uint32(24).uint32(message.softwareVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetModelVersionRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetModelVersionRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + case 2: + message.pid = reader.int32(); + break; + case 3: + message.softwareVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetModelVersionRequest { + return { + vid: isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? Number(object.pid) : 0, + softwareVersion: isSet(object.softwareVersion) ? 
Number(object.softwareVersion) : 0, + }; + }, + + toJSON(message: QueryGetModelVersionRequest): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.softwareVersion !== undefined && (obj.softwareVersion = Math.round(message.softwareVersion)); + return obj; + }, + + fromPartial, I>>(object: I): QueryGetModelVersionRequest { + const message = createBaseQueryGetModelVersionRequest(); + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + message.softwareVersion = object.softwareVersion ?? 0; + return message; + }, +}; + +function createBaseQueryGetModelVersionResponse(): QueryGetModelVersionResponse { + return { modelVersion: undefined }; +} + +export const QueryGetModelVersionResponse = { + encode(message: QueryGetModelVersionResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.modelVersion !== undefined) { + ModelVersion.encode(message.modelVersion, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetModelVersionResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetModelVersionResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.modelVersion = ModelVersion.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetModelVersionResponse { + return { modelVersion: isSet(object.modelVersion) ? ModelVersion.fromJSON(object.modelVersion) : undefined }; + }, + + toJSON(message: QueryGetModelVersionResponse): unknown { + const obj: any = {}; + message.modelVersion !== undefined + && (obj.modelVersion = message.modelVersion ? 
ModelVersion.toJSON(message.modelVersion) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryGetModelVersionResponse { + const message = createBaseQueryGetModelVersionResponse(); + message.modelVersion = (object.modelVersion !== undefined && object.modelVersion !== null) + ? ModelVersion.fromPartial(object.modelVersion) + : undefined; + return message; + }, +}; + +function createBaseQueryGetModelVersionsRequest(): QueryGetModelVersionsRequest { + return { vid: 0, pid: 0 }; +} + +export const QueryGetModelVersionsRequest = { + encode(message: QueryGetModelVersionsRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(16).int32(message.pid); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetModelVersionsRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetModelVersionsRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + case 2: + message.pid = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetModelVersionsRequest { + return { vid: isSet(object.vid) ? Number(object.vid) : 0, pid: isSet(object.pid) ? Number(object.pid) : 0 }; + }, + + toJSON(message: QueryGetModelVersionsRequest): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + return obj; + }, + + fromPartial, I>>(object: I): QueryGetModelVersionsRequest { + const message = createBaseQueryGetModelVersionsRequest(); + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 
0; + return message; + }, +}; + +function createBaseQueryGetModelVersionsResponse(): QueryGetModelVersionsResponse { + return { modelVersions: undefined }; +} + +export const QueryGetModelVersionsResponse = { + encode(message: QueryGetModelVersionsResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.modelVersions !== undefined) { + ModelVersions.encode(message.modelVersions, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetModelVersionsResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetModelVersionsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.modelVersions = ModelVersions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetModelVersionsResponse { + return { modelVersions: isSet(object.modelVersions) ? ModelVersions.fromJSON(object.modelVersions) : undefined }; + }, + + toJSON(message: QueryGetModelVersionsResponse): unknown { + const obj: any = {}; + message.modelVersions !== undefined + && (obj.modelVersions = message.modelVersions ? ModelVersions.toJSON(message.modelVersions) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetModelVersionsResponse { + const message = createBaseQueryGetModelVersionsResponse(); + message.modelVersions = (object.modelVersions !== undefined && object.modelVersions !== null) + ? ModelVersions.fromPartial(object.modelVersions) + : undefined; + return message; + }, +}; + +/** Query defines the gRPC querier service. */ +export interface Query { + /** Queries VendorProducts by index. */ + VendorProducts(request: QueryGetVendorProductsRequest): Promise; + /** Queries a Model by index. 
*/ + Model(request: QueryGetModelRequest): Promise; + /** Queries a list of all Model items. */ + ModelAll(request: QueryAllModelRequest): Promise; + /** Queries a ModelVersion by index. */ + ModelVersion(request: QueryGetModelVersionRequest): Promise; + /** Queries ModelVersions by index. */ + ModelVersions(request: QueryGetModelVersionsRequest): Promise; +} + +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.VendorProducts = this.VendorProducts.bind(this); + this.Model = this.Model.bind(this); + this.ModelAll = this.ModelAll.bind(this); + this.ModelVersion = this.ModelVersion.bind(this); + this.ModelVersions = this.ModelVersions.bind(this); + } + VendorProducts(request: QueryGetVendorProductsRequest): Promise { + const data = QueryGetVendorProductsRequest.encode(request).finish(); + const promise = this.rpc.request("zigbeealliance.distributedcomplianceledger.model.Query", "VendorProducts", data); + return promise.then((data) => QueryGetVendorProductsResponse.decode(new _m0.Reader(data))); + } + + Model(request: QueryGetModelRequest): Promise { + const data = QueryGetModelRequest.encode(request).finish(); + const promise = this.rpc.request("zigbeealliance.distributedcomplianceledger.model.Query", "Model", data); + return promise.then((data) => QueryGetModelResponse.decode(new _m0.Reader(data))); + } + + ModelAll(request: QueryAllModelRequest): Promise { + const data = QueryAllModelRequest.encode(request).finish(); + const promise = this.rpc.request("zigbeealliance.distributedcomplianceledger.model.Query", "ModelAll", data); + return promise.then((data) => QueryAllModelResponse.decode(new _m0.Reader(data))); + } + + ModelVersion(request: QueryGetModelVersionRequest): Promise { + const data = QueryGetModelVersionRequest.encode(request).finish(); + const promise = this.rpc.request("zigbeealliance.distributedcomplianceledger.model.Query", "ModelVersion", data); + return promise.then((data) 
=> QueryGetModelVersionResponse.decode(new _m0.Reader(data))); + } + + ModelVersions(request: QueryGetModelVersionsRequest): Promise { + const data = QueryGetModelVersionsRequest.encode(request).finish(); + const promise = this.rpc.request("zigbeealliance.distributedcomplianceledger.model.Query", "ModelVersions", data); + return promise.then((data) => QueryGetModelVersionsResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/tx.ts b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/tx.ts new file mode 100644 index 000000000..940c33b3d --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/tx.ts @@ -0,0 +1,1460 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.model"; + +export interface MsgCreateModel { + creator: string; + vid: number; + pid: number; + deviceTypeId: number; + productName: string; + productLabel: string; + partNumber: string; + commissioningCustomFlow: number; + commissioningCustomFlowUrl: string; + commissioningModeInitialStepsHint: number; + 
commissioningModeInitialStepsInstruction: string; + commissioningModeSecondaryStepsHint: number; + commissioningModeSecondaryStepsInstruction: string; + userManualUrl: string; + supportUrl: string; + productUrl: string; + lsfUrl: string; + schemaVersion: number; + commissionerRemoteUiFlowUrl: string; +} + +export interface MsgCreateModelResponse { +} + +export interface MsgUpdateModel { + creator: string; + vid: number; + pid: number; + productName: string; + productLabel: string; + partNumber: string; + commissioningCustomFlowUrl: string; + commissioningModeInitialStepsInstruction: string; + commissioningModeSecondaryStepsInstruction: string; + userManualUrl: string; + supportUrl: string; + productUrl: string; + lsfUrl: string; + lsfRevision: number; + schemaVersion: number; + commissionerRemoteUiFlowUrl: string; + commissioningModeInitialStepsHint: number; +} + +export interface MsgUpdateModelResponse { +} + +export interface MsgDeleteModel { + creator: string; + vid: number; + pid: number; +} + +export interface MsgDeleteModelResponse { +} + +export interface MsgCreateModelVersion { + creator: string; + vid: number; + pid: number; + softwareVersion: number; + softwareVersionString: string; + cdVersionNumber: number; + firmwareInformation: string; + softwareVersionValid: boolean; + otaUrl: string; + otaFileSize: number; + otaChecksum: string; + otaChecksumType: number; + minApplicableSoftwareVersion: number; + maxApplicableSoftwareVersion: number; + releaseNotesUrl: string; + schemaVersion: number; +} + +export interface MsgCreateModelVersionResponse { +} + +export interface MsgUpdateModelVersion { + creator: string; + vid: number; + pid: number; + softwareVersion: number; + softwareVersionValid: boolean; + otaUrl: string; + minApplicableSoftwareVersion: number; + maxApplicableSoftwareVersion: number; + releaseNotesUrl: string; + otaFileSize: number; + otaChecksum: string; + schemaVersion: number; +} + +export interface MsgUpdateModelVersionResponse { +} + 
+export interface MsgDeleteModelVersion { + creator: string; + vid: number; + pid: number; + softwareVersion: number; +} + +export interface MsgDeleteModelVersionResponse { +} + +function createBaseMsgCreateModel(): MsgCreateModel { + return { + creator: "", + vid: 0, + pid: 0, + deviceTypeId: 0, + productName: "", + productLabel: "", + partNumber: "", + commissioningCustomFlow: 0, + commissioningCustomFlowUrl: "", + commissioningModeInitialStepsHint: 0, + commissioningModeInitialStepsInstruction: "", + commissioningModeSecondaryStepsHint: 0, + commissioningModeSecondaryStepsInstruction: "", + userManualUrl: "", + supportUrl: "", + productUrl: "", + lsfUrl: "", + schemaVersion: 0, + commissionerRemoteUiFlowUrl: "", + }; +} + +export const MsgCreateModel = { + encode(message: MsgCreateModel, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.creator !== "") { + writer.uint32(10).string(message.creator); + } + if (message.vid !== 0) { + writer.uint32(16).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(24).int32(message.pid); + } + if (message.deviceTypeId !== 0) { + writer.uint32(32).int32(message.deviceTypeId); + } + if (message.productName !== "") { + writer.uint32(42).string(message.productName); + } + if (message.productLabel !== "") { + writer.uint32(50).string(message.productLabel); + } + if (message.partNumber !== "") { + writer.uint32(58).string(message.partNumber); + } + if (message.commissioningCustomFlow !== 0) { + writer.uint32(64).int32(message.commissioningCustomFlow); + } + if (message.commissioningCustomFlowUrl !== "") { + writer.uint32(74).string(message.commissioningCustomFlowUrl); + } + if (message.commissioningModeInitialStepsHint !== 0) { + writer.uint32(80).uint32(message.commissioningModeInitialStepsHint); + } + if (message.commissioningModeInitialStepsInstruction !== "") { + writer.uint32(90).string(message.commissioningModeInitialStepsInstruction); + } + if (message.commissioningModeSecondaryStepsHint !== 
0) { + writer.uint32(96).uint32(message.commissioningModeSecondaryStepsHint); + } + if (message.commissioningModeSecondaryStepsInstruction !== "") { + writer.uint32(106).string(message.commissioningModeSecondaryStepsInstruction); + } + if (message.userManualUrl !== "") { + writer.uint32(114).string(message.userManualUrl); + } + if (message.supportUrl !== "") { + writer.uint32(122).string(message.supportUrl); + } + if (message.productUrl !== "") { + writer.uint32(130).string(message.productUrl); + } + if (message.lsfUrl !== "") { + writer.uint32(138).string(message.lsfUrl); + } + if (message.schemaVersion !== 0) { + writer.uint32(144).uint32(message.schemaVersion); + } + if (message.commissionerRemoteUiFlowUrl !== "") { + writer.uint32(154).string(message.commissionerRemoteUiFlowUrl); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateModel { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgCreateModel(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.creator = reader.string(); + break; + case 2: + message.vid = reader.int32(); + break; + case 3: + message.pid = reader.int32(); + break; + case 4: + message.deviceTypeId = reader.int32(); + break; + case 5: + message.productName = reader.string(); + break; + case 6: + message.productLabel = reader.string(); + break; + case 7: + message.partNumber = reader.string(); + break; + case 8: + message.commissioningCustomFlow = reader.int32(); + break; + case 9: + message.commissioningCustomFlowUrl = reader.string(); + break; + case 10: + message.commissioningModeInitialStepsHint = reader.uint32(); + break; + case 11: + message.commissioningModeInitialStepsInstruction = reader.string(); + break; + case 12: + message.commissioningModeSecondaryStepsHint = reader.uint32(); + break; + case 13: + message.commissioningModeSecondaryStepsInstruction = reader.string(); + break; + case 14: + message.userManualUrl = reader.string(); + break; + case 15: + message.supportUrl = reader.string(); + break; + case 16: + message.productUrl = reader.string(); + break; + case 17: + message.lsfUrl = reader.string(); + break; + case 18: + message.schemaVersion = reader.uint32(); + break; + case 19: + message.commissionerRemoteUiFlowUrl = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgCreateModel { + return { + creator: isSet(object.creator) ? String(object.creator) : "", + vid: isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? Number(object.pid) : 0, + deviceTypeId: isSet(object.deviceTypeId) ? Number(object.deviceTypeId) : 0, + productName: isSet(object.productName) ? String(object.productName) : "", + productLabel: isSet(object.productLabel) ? 
String(object.productLabel) : "", + partNumber: isSet(object.partNumber) ? String(object.partNumber) : "", + commissioningCustomFlow: isSet(object.commissioningCustomFlow) ? Number(object.commissioningCustomFlow) : 0, + commissioningCustomFlowUrl: isSet(object.commissioningCustomFlowUrl) + ? String(object.commissioningCustomFlowUrl) + : "", + commissioningModeInitialStepsHint: isSet(object.commissioningModeInitialStepsHint) + ? Number(object.commissioningModeInitialStepsHint) + : 0, + commissioningModeInitialStepsInstruction: isSet(object.commissioningModeInitialStepsInstruction) + ? String(object.commissioningModeInitialStepsInstruction) + : "", + commissioningModeSecondaryStepsHint: isSet(object.commissioningModeSecondaryStepsHint) + ? Number(object.commissioningModeSecondaryStepsHint) + : 0, + commissioningModeSecondaryStepsInstruction: isSet(object.commissioningModeSecondaryStepsInstruction) + ? String(object.commissioningModeSecondaryStepsInstruction) + : "", + userManualUrl: isSet(object.userManualUrl) ? String(object.userManualUrl) : "", + supportUrl: isSet(object.supportUrl) ? String(object.supportUrl) : "", + productUrl: isSet(object.productUrl) ? String(object.productUrl) : "", + lsfUrl: isSet(object.lsfUrl) ? String(object.lsfUrl) : "", + schemaVersion: isSet(object.schemaVersion) ? Number(object.schemaVersion) : 0, + commissionerRemoteUiFlowUrl: isSet(object.commissionerRemoteUiFlowUrl) + ? 
String(object.commissionerRemoteUiFlowUrl) + : "", + }; + }, + + toJSON(message: MsgCreateModel): unknown { + const obj: any = {}; + message.creator !== undefined && (obj.creator = message.creator); + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.deviceTypeId !== undefined && (obj.deviceTypeId = Math.round(message.deviceTypeId)); + message.productName !== undefined && (obj.productName = message.productName); + message.productLabel !== undefined && (obj.productLabel = message.productLabel); + message.partNumber !== undefined && (obj.partNumber = message.partNumber); + message.commissioningCustomFlow !== undefined + && (obj.commissioningCustomFlow = Math.round(message.commissioningCustomFlow)); + message.commissioningCustomFlowUrl !== undefined + && (obj.commissioningCustomFlowUrl = message.commissioningCustomFlowUrl); + message.commissioningModeInitialStepsHint !== undefined + && (obj.commissioningModeInitialStepsHint = Math.round(message.commissioningModeInitialStepsHint)); + message.commissioningModeInitialStepsInstruction !== undefined + && (obj.commissioningModeInitialStepsInstruction = message.commissioningModeInitialStepsInstruction); + message.commissioningModeSecondaryStepsHint !== undefined + && (obj.commissioningModeSecondaryStepsHint = Math.round(message.commissioningModeSecondaryStepsHint)); + message.commissioningModeSecondaryStepsInstruction !== undefined + && (obj.commissioningModeSecondaryStepsInstruction = message.commissioningModeSecondaryStepsInstruction); + message.userManualUrl !== undefined && (obj.userManualUrl = message.userManualUrl); + message.supportUrl !== undefined && (obj.supportUrl = message.supportUrl); + message.productUrl !== undefined && (obj.productUrl = message.productUrl); + message.lsfUrl !== undefined && (obj.lsfUrl = message.lsfUrl); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); 
+ message.commissionerRemoteUiFlowUrl !== undefined + && (obj.commissionerRemoteUiFlowUrl = message.commissionerRemoteUiFlowUrl); + return obj; + }, + + fromPartial, I>>(object: I): MsgCreateModel { + const message = createBaseMsgCreateModel(); + message.creator = object.creator ?? ""; + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + message.deviceTypeId = object.deviceTypeId ?? 0; + message.productName = object.productName ?? ""; + message.productLabel = object.productLabel ?? ""; + message.partNumber = object.partNumber ?? ""; + message.commissioningCustomFlow = object.commissioningCustomFlow ?? 0; + message.commissioningCustomFlowUrl = object.commissioningCustomFlowUrl ?? ""; + message.commissioningModeInitialStepsHint = object.commissioningModeInitialStepsHint ?? 0; + message.commissioningModeInitialStepsInstruction = object.commissioningModeInitialStepsInstruction ?? ""; + message.commissioningModeSecondaryStepsHint = object.commissioningModeSecondaryStepsHint ?? 0; + message.commissioningModeSecondaryStepsInstruction = object.commissioningModeSecondaryStepsInstruction ?? ""; + message.userManualUrl = object.userManualUrl ?? ""; + message.supportUrl = object.supportUrl ?? ""; + message.productUrl = object.productUrl ?? ""; + message.lsfUrl = object.lsfUrl ?? ""; + message.schemaVersion = object.schemaVersion ?? 0; + message.commissionerRemoteUiFlowUrl = object.commissionerRemoteUiFlowUrl ?? ""; + return message; + }, +}; + +function createBaseMsgCreateModelResponse(): MsgCreateModelResponse { + return {}; +} + +export const MsgCreateModelResponse = { + encode(_: MsgCreateModelResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateModelResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgCreateModelResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgCreateModelResponse { + return {}; + }, + + toJSON(_: MsgCreateModelResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgCreateModelResponse { + const message = createBaseMsgCreateModelResponse(); + return message; + }, +}; + +function createBaseMsgUpdateModel(): MsgUpdateModel { + return { + creator: "", + vid: 0, + pid: 0, + productName: "", + productLabel: "", + partNumber: "", + commissioningCustomFlowUrl: "", + commissioningModeInitialStepsInstruction: "", + commissioningModeSecondaryStepsInstruction: "", + userManualUrl: "", + supportUrl: "", + productUrl: "", + lsfUrl: "", + lsfRevision: 0, + schemaVersion: 0, + commissionerRemoteUiFlowUrl: "", + commissioningModeInitialStepsHint: 0, + }; +} + +export const MsgUpdateModel = { + encode(message: MsgUpdateModel, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.creator !== "") { + writer.uint32(10).string(message.creator); + } + if (message.vid !== 0) { + writer.uint32(16).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(24).int32(message.pid); + } + if (message.productName !== "") { + writer.uint32(34).string(message.productName); + } + if (message.productLabel !== "") { + writer.uint32(42).string(message.productLabel); + } + if (message.partNumber !== "") { + writer.uint32(50).string(message.partNumber); + } + if (message.commissioningCustomFlowUrl !== "") { + writer.uint32(58).string(message.commissioningCustomFlowUrl); + } + if (message.commissioningModeInitialStepsInstruction !== "") { + writer.uint32(66).string(message.commissioningModeInitialStepsInstruction); + } + if (message.commissioningModeSecondaryStepsInstruction !== "") { + 
writer.uint32(74).string(message.commissioningModeSecondaryStepsInstruction); + } + if (message.userManualUrl !== "") { + writer.uint32(82).string(message.userManualUrl); + } + if (message.supportUrl !== "") { + writer.uint32(90).string(message.supportUrl); + } + if (message.productUrl !== "") { + writer.uint32(98).string(message.productUrl); + } + if (message.lsfUrl !== "") { + writer.uint32(106).string(message.lsfUrl); + } + if (message.lsfRevision !== 0) { + writer.uint32(112).int32(message.lsfRevision); + } + if (message.schemaVersion !== 0) { + writer.uint32(120).uint32(message.schemaVersion); + } + if (message.commissionerRemoteUiFlowUrl !== "") { + writer.uint32(130).string(message.commissionerRemoteUiFlowUrl); + } + if (message.commissioningModeInitialStepsHint !== 0) { + writer.uint32(136).uint32(message.commissioningModeInitialStepsHint); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateModel { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgUpdateModel(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.creator = reader.string(); + break; + case 2: + message.vid = reader.int32(); + break; + case 3: + message.pid = reader.int32(); + break; + case 4: + message.productName = reader.string(); + break; + case 5: + message.productLabel = reader.string(); + break; + case 6: + message.partNumber = reader.string(); + break; + case 7: + message.commissioningCustomFlowUrl = reader.string(); + break; + case 8: + message.commissioningModeInitialStepsInstruction = reader.string(); + break; + case 9: + message.commissioningModeSecondaryStepsInstruction = reader.string(); + break; + case 10: + message.userManualUrl = reader.string(); + break; + case 11: + message.supportUrl = reader.string(); + break; + case 12: + message.productUrl = reader.string(); + break; + case 13: + message.lsfUrl = reader.string(); + break; + case 14: + message.lsfRevision = reader.int32(); + break; + case 15: + message.schemaVersion = reader.uint32(); + break; + case 16: + message.commissionerRemoteUiFlowUrl = reader.string(); + break; + case 17: + message.commissioningModeInitialStepsHint = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgUpdateModel { + return { + creator: isSet(object.creator) ? String(object.creator) : "", + vid: isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? Number(object.pid) : 0, + productName: isSet(object.productName) ? String(object.productName) : "", + productLabel: isSet(object.productLabel) ? String(object.productLabel) : "", + partNumber: isSet(object.partNumber) ? String(object.partNumber) : "", + commissioningCustomFlowUrl: isSet(object.commissioningCustomFlowUrl) + ? 
String(object.commissioningCustomFlowUrl) + : "", + commissioningModeInitialStepsInstruction: isSet(object.commissioningModeInitialStepsInstruction) + ? String(object.commissioningModeInitialStepsInstruction) + : "", + commissioningModeSecondaryStepsInstruction: isSet(object.commissioningModeSecondaryStepsInstruction) + ? String(object.commissioningModeSecondaryStepsInstruction) + : "", + userManualUrl: isSet(object.userManualUrl) ? String(object.userManualUrl) : "", + supportUrl: isSet(object.supportUrl) ? String(object.supportUrl) : "", + productUrl: isSet(object.productUrl) ? String(object.productUrl) : "", + lsfUrl: isSet(object.lsfUrl) ? String(object.lsfUrl) : "", + lsfRevision: isSet(object.lsfRevision) ? Number(object.lsfRevision) : 0, + schemaVersion: isSet(object.schemaVersion) ? Number(object.schemaVersion) : 0, + commissionerRemoteUiFlowUrl: isSet(object.commissionerRemoteUiFlowUrl) + ? String(object.commissionerRemoteUiFlowUrl) + : "", + commissioningModeInitialStepsHint: isSet(object.commissioningModeInitialStepsHint) + ? 
Number(object.commissioningModeInitialStepsHint) + : 0, + }; + }, + + toJSON(message: MsgUpdateModel): unknown { + const obj: any = {}; + message.creator !== undefined && (obj.creator = message.creator); + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.productName !== undefined && (obj.productName = message.productName); + message.productLabel !== undefined && (obj.productLabel = message.productLabel); + message.partNumber !== undefined && (obj.partNumber = message.partNumber); + message.commissioningCustomFlowUrl !== undefined + && (obj.commissioningCustomFlowUrl = message.commissioningCustomFlowUrl); + message.commissioningModeInitialStepsInstruction !== undefined + && (obj.commissioningModeInitialStepsInstruction = message.commissioningModeInitialStepsInstruction); + message.commissioningModeSecondaryStepsInstruction !== undefined + && (obj.commissioningModeSecondaryStepsInstruction = message.commissioningModeSecondaryStepsInstruction); + message.userManualUrl !== undefined && (obj.userManualUrl = message.userManualUrl); + message.supportUrl !== undefined && (obj.supportUrl = message.supportUrl); + message.productUrl !== undefined && (obj.productUrl = message.productUrl); + message.lsfUrl !== undefined && (obj.lsfUrl = message.lsfUrl); + message.lsfRevision !== undefined && (obj.lsfRevision = Math.round(message.lsfRevision)); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + message.commissionerRemoteUiFlowUrl !== undefined + && (obj.commissionerRemoteUiFlowUrl = message.commissionerRemoteUiFlowUrl); + message.commissioningModeInitialStepsHint !== undefined + && (obj.commissioningModeInitialStepsHint = Math.round(message.commissioningModeInitialStepsHint)); + return obj; + }, + + fromPartial, I>>(object: I): MsgUpdateModel { + const message = createBaseMsgUpdateModel(); + message.creator = object.creator ?? 
""; + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + message.productName = object.productName ?? ""; + message.productLabel = object.productLabel ?? ""; + message.partNumber = object.partNumber ?? ""; + message.commissioningCustomFlowUrl = object.commissioningCustomFlowUrl ?? ""; + message.commissioningModeInitialStepsInstruction = object.commissioningModeInitialStepsInstruction ?? ""; + message.commissioningModeSecondaryStepsInstruction = object.commissioningModeSecondaryStepsInstruction ?? ""; + message.userManualUrl = object.userManualUrl ?? ""; + message.supportUrl = object.supportUrl ?? ""; + message.productUrl = object.productUrl ?? ""; + message.lsfUrl = object.lsfUrl ?? ""; + message.lsfRevision = object.lsfRevision ?? 0; + message.schemaVersion = object.schemaVersion ?? 0; + message.commissionerRemoteUiFlowUrl = object.commissionerRemoteUiFlowUrl ?? ""; + message.commissioningModeInitialStepsHint = object.commissioningModeInitialStepsHint ?? 0; + return message; + }, +}; + +function createBaseMsgUpdateModelResponse(): MsgUpdateModelResponse { + return {}; +} + +export const MsgUpdateModelResponse = { + encode(_: MsgUpdateModelResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateModelResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgUpdateModelResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgUpdateModelResponse { + return {}; + }, + + toJSON(_: MsgUpdateModelResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgUpdateModelResponse { + const message = createBaseMsgUpdateModelResponse(); + return message; + }, +}; + +function createBaseMsgDeleteModel(): MsgDeleteModel { + return { creator: "", vid: 0, pid: 0 }; +} + +export const MsgDeleteModel = { + encode(message: MsgDeleteModel, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.creator !== "") { + writer.uint32(10).string(message.creator); + } + if (message.vid !== 0) { + writer.uint32(16).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(24).int32(message.pid); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDeleteModel { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgDeleteModel(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.creator = reader.string(); + break; + case 2: + message.vid = reader.int32(); + break; + case 3: + message.pid = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgDeleteModel { + return { + creator: isSet(object.creator) ? String(object.creator) : "", + vid: isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? 
Number(object.pid) : 0, + }; + }, + + toJSON(message: MsgDeleteModel): unknown { + const obj: any = {}; + message.creator !== undefined && (obj.creator = message.creator); + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + return obj; + }, + + fromPartial, I>>(object: I): MsgDeleteModel { + const message = createBaseMsgDeleteModel(); + message.creator = object.creator ?? ""; + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + return message; + }, +}; + +function createBaseMsgDeleteModelResponse(): MsgDeleteModelResponse { + return {}; +} + +export const MsgDeleteModelResponse = { + encode(_: MsgDeleteModelResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDeleteModelResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgDeleteModelResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgDeleteModelResponse { + return {}; + }, + + toJSON(_: MsgDeleteModelResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgDeleteModelResponse { + const message = createBaseMsgDeleteModelResponse(); + return message; + }, +}; + +function createBaseMsgCreateModelVersion(): MsgCreateModelVersion { + return { + creator: "", + vid: 0, + pid: 0, + softwareVersion: 0, + softwareVersionString: "", + cdVersionNumber: 0, + firmwareInformation: "", + softwareVersionValid: false, + otaUrl: "", + otaFileSize: 0, + otaChecksum: "", + otaChecksumType: 0, + minApplicableSoftwareVersion: 0, + maxApplicableSoftwareVersion: 0, + releaseNotesUrl: "", + schemaVersion: 0, + }; +} + +export const MsgCreateModelVersion = { + encode(message: MsgCreateModelVersion, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.creator !== "") { + writer.uint32(10).string(message.creator); + } + if (message.vid !== 0) { + writer.uint32(16).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(24).int32(message.pid); + } + if (message.softwareVersion !== 0) { + writer.uint32(32).uint32(message.softwareVersion); + } + if (message.softwareVersionString !== "") { + writer.uint32(42).string(message.softwareVersionString); + } + if (message.cdVersionNumber !== 0) { + writer.uint32(48).int32(message.cdVersionNumber); + } + if (message.firmwareInformation !== "") { + writer.uint32(58).string(message.firmwareInformation); + } + if (message.softwareVersionValid === true) { + writer.uint32(64).bool(message.softwareVersionValid); + } + if (message.otaUrl !== "") { + writer.uint32(74).string(message.otaUrl); + } + if (message.otaFileSize !== 0) { + 
writer.uint32(80).uint64(message.otaFileSize); + } + if (message.otaChecksum !== "") { + writer.uint32(90).string(message.otaChecksum); + } + if (message.otaChecksumType !== 0) { + writer.uint32(96).int32(message.otaChecksumType); + } + if (message.minApplicableSoftwareVersion !== 0) { + writer.uint32(104).uint32(message.minApplicableSoftwareVersion); + } + if (message.maxApplicableSoftwareVersion !== 0) { + writer.uint32(112).uint32(message.maxApplicableSoftwareVersion); + } + if (message.releaseNotesUrl !== "") { + writer.uint32(122).string(message.releaseNotesUrl); + } + if (message.schemaVersion !== 0) { + writer.uint32(128).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateModelVersion { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCreateModelVersion(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.creator = reader.string(); + break; + case 2: + message.vid = reader.int32(); + break; + case 3: + message.pid = reader.int32(); + break; + case 4: + message.softwareVersion = reader.uint32(); + break; + case 5: + message.softwareVersionString = reader.string(); + break; + case 6: + message.cdVersionNumber = reader.int32(); + break; + case 7: + message.firmwareInformation = reader.string(); + break; + case 8: + message.softwareVersionValid = reader.bool(); + break; + case 9: + message.otaUrl = reader.string(); + break; + case 10: + message.otaFileSize = longToNumber(reader.uint64() as Long); + break; + case 11: + message.otaChecksum = reader.string(); + break; + case 12: + message.otaChecksumType = reader.int32(); + break; + case 13: + message.minApplicableSoftwareVersion = reader.uint32(); + break; + case 14: + message.maxApplicableSoftwareVersion = reader.uint32(); + break; + case 15: + 
message.releaseNotesUrl = reader.string(); + break; + case 16: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgCreateModelVersion { + return { + creator: isSet(object.creator) ? String(object.creator) : "", + vid: isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? Number(object.pid) : 0, + softwareVersion: isSet(object.softwareVersion) ? Number(object.softwareVersion) : 0, + softwareVersionString: isSet(object.softwareVersionString) ? String(object.softwareVersionString) : "", + cdVersionNumber: isSet(object.cdVersionNumber) ? Number(object.cdVersionNumber) : 0, + firmwareInformation: isSet(object.firmwareInformation) ? String(object.firmwareInformation) : "", + softwareVersionValid: isSet(object.softwareVersionValid) ? Boolean(object.softwareVersionValid) : false, + otaUrl: isSet(object.otaUrl) ? String(object.otaUrl) : "", + otaFileSize: isSet(object.otaFileSize) ? Number(object.otaFileSize) : 0, + otaChecksum: isSet(object.otaChecksum) ? String(object.otaChecksum) : "", + otaChecksumType: isSet(object.otaChecksumType) ? Number(object.otaChecksumType) : 0, + minApplicableSoftwareVersion: isSet(object.minApplicableSoftwareVersion) + ? Number(object.minApplicableSoftwareVersion) + : 0, + maxApplicableSoftwareVersion: isSet(object.maxApplicableSoftwareVersion) + ? Number(object.maxApplicableSoftwareVersion) + : 0, + releaseNotesUrl: isSet(object.releaseNotesUrl) ? String(object.releaseNotesUrl) : "", + schemaVersion: isSet(object.schemaVersion) ? 
Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: MsgCreateModelVersion): unknown { + const obj: any = {}; + message.creator !== undefined && (obj.creator = message.creator); + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.softwareVersion !== undefined && (obj.softwareVersion = Math.round(message.softwareVersion)); + message.softwareVersionString !== undefined && (obj.softwareVersionString = message.softwareVersionString); + message.cdVersionNumber !== undefined && (obj.cdVersionNumber = Math.round(message.cdVersionNumber)); + message.firmwareInformation !== undefined && (obj.firmwareInformation = message.firmwareInformation); + message.softwareVersionValid !== undefined && (obj.softwareVersionValid = message.softwareVersionValid); + message.otaUrl !== undefined && (obj.otaUrl = message.otaUrl); + message.otaFileSize !== undefined && (obj.otaFileSize = Math.round(message.otaFileSize)); + message.otaChecksum !== undefined && (obj.otaChecksum = message.otaChecksum); + message.otaChecksumType !== undefined && (obj.otaChecksumType = Math.round(message.otaChecksumType)); + message.minApplicableSoftwareVersion !== undefined + && (obj.minApplicableSoftwareVersion = Math.round(message.minApplicableSoftwareVersion)); + message.maxApplicableSoftwareVersion !== undefined + && (obj.maxApplicableSoftwareVersion = Math.round(message.maxApplicableSoftwareVersion)); + message.releaseNotesUrl !== undefined && (obj.releaseNotesUrl = message.releaseNotesUrl); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): MsgCreateModelVersion { + const message = createBaseMsgCreateModelVersion(); + message.creator = object.creator ?? ""; + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + message.softwareVersion = object.softwareVersion ?? 
0; + message.softwareVersionString = object.softwareVersionString ?? ""; + message.cdVersionNumber = object.cdVersionNumber ?? 0; + message.firmwareInformation = object.firmwareInformation ?? ""; + message.softwareVersionValid = object.softwareVersionValid ?? false; + message.otaUrl = object.otaUrl ?? ""; + message.otaFileSize = object.otaFileSize ?? 0; + message.otaChecksum = object.otaChecksum ?? ""; + message.otaChecksumType = object.otaChecksumType ?? 0; + message.minApplicableSoftwareVersion = object.minApplicableSoftwareVersion ?? 0; + message.maxApplicableSoftwareVersion = object.maxApplicableSoftwareVersion ?? 0; + message.releaseNotesUrl = object.releaseNotesUrl ?? ""; + message.schemaVersion = object.schemaVersion ?? 0; + return message; + }, +}; + +function createBaseMsgCreateModelVersionResponse(): MsgCreateModelVersionResponse { + return {}; +} + +export const MsgCreateModelVersionResponse = { + encode(_: MsgCreateModelVersionResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateModelVersionResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgCreateModelVersionResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgCreateModelVersionResponse { + return {}; + }, + + toJSON(_: MsgCreateModelVersionResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgCreateModelVersionResponse { + const message = createBaseMsgCreateModelVersionResponse(); + return message; + }, +}; + +function createBaseMsgUpdateModelVersion(): MsgUpdateModelVersion { + return { + creator: "", + vid: 0, + pid: 0, + softwareVersion: 0, + softwareVersionValid: false, + otaUrl: "", + minApplicableSoftwareVersion: 0, + maxApplicableSoftwareVersion: 0, + releaseNotesUrl: "", + otaFileSize: 0, + otaChecksum: "", + schemaVersion: 0, + }; +} + +export const MsgUpdateModelVersion = { + encode(message: MsgUpdateModelVersion, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.creator !== "") { + writer.uint32(10).string(message.creator); + } + if (message.vid !== 0) { + writer.uint32(16).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(24).int32(message.pid); + } + if (message.softwareVersion !== 0) { + writer.uint32(32).uint32(message.softwareVersion); + } + if (message.softwareVersionValid === true) { + writer.uint32(40).bool(message.softwareVersionValid); + } + if (message.otaUrl !== "") { + writer.uint32(50).string(message.otaUrl); + } + if (message.minApplicableSoftwareVersion !== 0) { + writer.uint32(56).uint32(message.minApplicableSoftwareVersion); + } + if (message.maxApplicableSoftwareVersion !== 0) { + writer.uint32(64).uint32(message.maxApplicableSoftwareVersion); + } + if (message.releaseNotesUrl !== "") { + writer.uint32(74).string(message.releaseNotesUrl); + } + if (message.otaFileSize !== 0) { + writer.uint32(80).uint64(message.otaFileSize); + } + if 
(message.otaChecksum !== "") { + writer.uint32(90).string(message.otaChecksum); + } + if (message.schemaVersion !== 0) { + writer.uint32(96).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateModelVersion { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdateModelVersion(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.creator = reader.string(); + break; + case 2: + message.vid = reader.int32(); + break; + case 3: + message.pid = reader.int32(); + break; + case 4: + message.softwareVersion = reader.uint32(); + break; + case 5: + message.softwareVersionValid = reader.bool(); + break; + case 6: + message.otaUrl = reader.string(); + break; + case 7: + message.minApplicableSoftwareVersion = reader.uint32(); + break; + case 8: + message.maxApplicableSoftwareVersion = reader.uint32(); + break; + case 9: + message.releaseNotesUrl = reader.string(); + break; + case 10: + message.otaFileSize = longToNumber(reader.uint64() as Long); + break; + case 11: + message.otaChecksum = reader.string(); + break; + case 12: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgUpdateModelVersion { + return { + creator: isSet(object.creator) ? String(object.creator) : "", + vid: isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? Number(object.pid) : 0, + softwareVersion: isSet(object.softwareVersion) ? Number(object.softwareVersion) : 0, + softwareVersionValid: isSet(object.softwareVersionValid) ? Boolean(object.softwareVersionValid) : false, + otaUrl: isSet(object.otaUrl) ? String(object.otaUrl) : "", + minApplicableSoftwareVersion: isSet(object.minApplicableSoftwareVersion) + ? 
Number(object.minApplicableSoftwareVersion) + : 0, + maxApplicableSoftwareVersion: isSet(object.maxApplicableSoftwareVersion) + ? Number(object.maxApplicableSoftwareVersion) + : 0, + releaseNotesUrl: isSet(object.releaseNotesUrl) ? String(object.releaseNotesUrl) : "", + otaFileSize: isSet(object.otaFileSize) ? Number(object.otaFileSize) : 0, + otaChecksum: isSet(object.otaChecksum) ? String(object.otaChecksum) : "", + schemaVersion: isSet(object.schemaVersion) ? Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: MsgUpdateModelVersion): unknown { + const obj: any = {}; + message.creator !== undefined && (obj.creator = message.creator); + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.softwareVersion !== undefined && (obj.softwareVersion = Math.round(message.softwareVersion)); + message.softwareVersionValid !== undefined && (obj.softwareVersionValid = message.softwareVersionValid); + message.otaUrl !== undefined && (obj.otaUrl = message.otaUrl); + message.minApplicableSoftwareVersion !== undefined + && (obj.minApplicableSoftwareVersion = Math.round(message.minApplicableSoftwareVersion)); + message.maxApplicableSoftwareVersion !== undefined + && (obj.maxApplicableSoftwareVersion = Math.round(message.maxApplicableSoftwareVersion)); + message.releaseNotesUrl !== undefined && (obj.releaseNotesUrl = message.releaseNotesUrl); + message.otaFileSize !== undefined && (obj.otaFileSize = Math.round(message.otaFileSize)); + message.otaChecksum !== undefined && (obj.otaChecksum = message.otaChecksum); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): MsgUpdateModelVersion { + const message = createBaseMsgUpdateModelVersion(); + message.creator = object.creator ?? ""; + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 
0; + message.softwareVersion = object.softwareVersion ?? 0; + message.softwareVersionValid = object.softwareVersionValid ?? false; + message.otaUrl = object.otaUrl ?? ""; + message.minApplicableSoftwareVersion = object.minApplicableSoftwareVersion ?? 0; + message.maxApplicableSoftwareVersion = object.maxApplicableSoftwareVersion ?? 0; + message.releaseNotesUrl = object.releaseNotesUrl ?? ""; + message.otaFileSize = object.otaFileSize ?? 0; + message.otaChecksum = object.otaChecksum ?? ""; + message.schemaVersion = object.schemaVersion ?? 0; + return message; + }, +}; + +function createBaseMsgUpdateModelVersionResponse(): MsgUpdateModelVersionResponse { + return {}; +} + +export const MsgUpdateModelVersionResponse = { + encode(_: MsgUpdateModelVersionResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateModelVersionResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgUpdateModelVersionResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgUpdateModelVersionResponse { + return {}; + }, + + toJSON(_: MsgUpdateModelVersionResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgUpdateModelVersionResponse { + const message = createBaseMsgUpdateModelVersionResponse(); + return message; + }, +}; + +function createBaseMsgDeleteModelVersion(): MsgDeleteModelVersion { + return { creator: "", vid: 0, pid: 0, softwareVersion: 0 }; +} + +export const MsgDeleteModelVersion = { + encode(message: MsgDeleteModelVersion, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.creator !== "") { + writer.uint32(10).string(message.creator); + } + if (message.vid !== 0) { + writer.uint32(16).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(24).int32(message.pid); + } + if (message.softwareVersion !== 0) { + writer.uint32(32).uint32(message.softwareVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDeleteModelVersion { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgDeleteModelVersion(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.creator = reader.string(); + break; + case 2: + message.vid = reader.int32(); + break; + case 3: + message.pid = reader.int32(); + break; + case 4: + message.softwareVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgDeleteModelVersion { + return { + creator: isSet(object.creator) ? 
String(object.creator) : "", + vid: isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? Number(object.pid) : 0, + softwareVersion: isSet(object.softwareVersion) ? Number(object.softwareVersion) : 0, + }; + }, + + toJSON(message: MsgDeleteModelVersion): unknown { + const obj: any = {}; + message.creator !== undefined && (obj.creator = message.creator); + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.softwareVersion !== undefined && (obj.softwareVersion = Math.round(message.softwareVersion)); + return obj; + }, + + fromPartial, I>>(object: I): MsgDeleteModelVersion { + const message = createBaseMsgDeleteModelVersion(); + message.creator = object.creator ?? ""; + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + message.softwareVersion = object.softwareVersion ?? 0; + return message; + }, +}; + +function createBaseMsgDeleteModelVersionResponse(): MsgDeleteModelVersionResponse { + return {}; +} + +export const MsgDeleteModelVersionResponse = { + encode(_: MsgDeleteModelVersionResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDeleteModelVersionResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgDeleteModelVersionResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgDeleteModelVersionResponse { + return {}; + }, + + toJSON(_: MsgDeleteModelVersionResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgDeleteModelVersionResponse { + const message = createBaseMsgDeleteModelVersionResponse(); + return message; + }, +}; + +/** Msg defines the Msg service. */ +export interface Msg { + CreateModel(request: MsgCreateModel): Promise; + UpdateModel(request: MsgUpdateModel): Promise; + DeleteModel(request: MsgDeleteModel): Promise; + CreateModelVersion(request: MsgCreateModelVersion): Promise; + UpdateModelVersion(request: MsgUpdateModelVersion): Promise; + /** this line is used by starport scaffolding # proto/tx/rpc */ + DeleteModelVersion(request: MsgDeleteModelVersion): Promise; +} + +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.CreateModel = this.CreateModel.bind(this); + this.UpdateModel = this.UpdateModel.bind(this); + this.DeleteModel = this.DeleteModel.bind(this); + this.CreateModelVersion = this.CreateModelVersion.bind(this); + this.UpdateModelVersion = this.UpdateModelVersion.bind(this); + this.DeleteModelVersion = this.DeleteModelVersion.bind(this); + } + CreateModel(request: MsgCreateModel): Promise { + const data = MsgCreateModel.encode(request).finish(); + const promise = this.rpc.request("zigbeealliance.distributedcomplianceledger.model.Msg", "CreateModel", data); + return promise.then((data) => MsgCreateModelResponse.decode(new _m0.Reader(data))); + } + + UpdateModel(request: MsgUpdateModel): Promise { + const data = MsgUpdateModel.encode(request).finish(); + const promise = 
this.rpc.request("zigbeealliance.distributedcomplianceledger.model.Msg", "UpdateModel", data); + return promise.then((data) => MsgUpdateModelResponse.decode(new _m0.Reader(data))); + } + + DeleteModel(request: MsgDeleteModel): Promise { + const data = MsgDeleteModel.encode(request).finish(); + const promise = this.rpc.request("zigbeealliance.distributedcomplianceledger.model.Msg", "DeleteModel", data); + return promise.then((data) => MsgDeleteModelResponse.decode(new _m0.Reader(data))); + } + + CreateModelVersion(request: MsgCreateModelVersion): Promise { + const data = MsgCreateModelVersion.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.model.Msg", + "CreateModelVersion", + data, + ); + return promise.then((data) => MsgCreateModelVersionResponse.decode(new _m0.Reader(data))); + } + + UpdateModelVersion(request: MsgUpdateModelVersion): Promise { + const data = MsgUpdateModelVersion.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.model.Msg", + "UpdateModelVersion", + data, + ); + return promise.then((data) => MsgUpdateModelVersionResponse.decode(new _m0.Reader(data))); + } + + DeleteModelVersion(request: MsgDeleteModelVersion): Promise { + const data = MsgDeleteModelVersion.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.model.Msg", + "DeleteModelVersion", + data, + ); + return promise.then((data) => MsgDeleteModelVersionResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if 
(typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/vendor_products.ts b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/vendor_products.ts new file mode 100644 index 000000000..2926070cf --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.model/types/zigbeealliance/distributedcomplianceledger/model/vendor_products.ts @@ -0,0 +1,87 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Product } from "./product"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.model"; + +export interface VendorProducts { + vid: number; + products: Product[]; +} + +function createBaseVendorProducts(): VendorProducts { + return { vid: 0, products: [] }; +} + +export const VendorProducts = { + encode(message: VendorProducts, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + for (const v of 
message.products) { + Product.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): VendorProducts { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseVendorProducts(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + case 2: + message.products.push(Product.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): VendorProducts { + return { + vid: isSet(object.vid) ? Number(object.vid) : 0, + products: Array.isArray(object?.products) ? object.products.map((e: any) => Product.fromJSON(e)) : [], + }; + }, + + toJSON(message: VendorProducts): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + if (message.products) { + obj.products = message.products.map((e) => e ? Product.toJSON(e) : undefined); + } else { + obj.products = []; + } + return obj; + }, + + fromPartial, I>>(object: I): VendorProducts { + const message = createBaseVendorProducts(); + message.vid = object.vid ?? 0; + message.products = object.products?.map((e) => Product.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/api.swagger.yml b/ts-client/zigbeealliance.distributedcomplianceledger.pki/api.swagger.yml new file mode 100644 index 000000000..4dcf819c0 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/api.swagger.yml @@ -0,0 +1,4244 @@ +swagger: '2.0' +info: + title: HTTP API Console zigbeealliance.distributedcomplianceledger.pki + name: '' + description: '' +paths: + /dcl/pki/certificates: + get: + operationId: ApprovedCertificatesAll + responses: + '200': + description: A successful response. + schema: + type: object + properties: + approvedCertificates: + type: array + items: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + - name: subjectKeyId + in: query + required: false + type: string + tags: + - Query + /dcl/pki/certificates/{subject}: + get: + operationId: ApprovedCertificatesBySubject + responses: + '200': + description: A successful response. + schema: + type: object + properties: + approvedCertificatesBySubject: + type: object + properties: + subject: + type: string + subjectKeyIds: + type: array + items: + type: string + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: subject + in: path + required: true + type: string + tags: + - Query + /dcl/pki/certificates/{subject}/{subjectKeyId}: + get: + operationId: ApprovedCertificates + responses: + '200': + description: A successful response. 
+ schema: + type: object + properties: + approvedCertificates: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: subject + in: path + required: true + type: string + - name: subjectKeyId + in: path + required: true + type: string + tags: + - Query + /dcl/pki/child-certificates/{issuer}/{authorityKeyId}: + get: + operationId: ChildCertificates + responses: + '200': + description: A successful response. + schema: + type: object + properties: + childCertificates: + type: object + properties: + issuer: + type: string + authorityKeyId: + type: string + certIds: + type: array + items: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: issuer + in: path + required: true + type: string + - name: authorityKeyId + in: path + required: true + type: string + tags: + - Query + /dcl/pki/noc-certificates: + get: + operationId: NocCertificatesAll + responses: + '200': + description: A successful response. + schema: + type: object + properties: + nocCertificates: + type: array + items: + type: object + properties: + vid: + type: integer + format: int32 + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/pki/noc-certificates/{vid}: + get: + operationId: NocCertificates + responses: + '200': + description: A successful response. + schema: + type: object + properties: + nocCertificates: + type: object + properties: + vid: + type: integer + format: int32 + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: vid + in: path + required: true + type: integer + format: int32 + tags: + - Query + /dcl/pki/noc-root-certificates: + get: + operationId: NocRootCertificatesAll + responses: + '200': + description: A successful response. + schema: + type: object + properties: + nocRootCertificates: + type: array + items: + type: object + properties: + vid: + type: integer + format: int32 + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/pki/noc-root-certificates/{vid}: + get: + operationId: NocRootCertificates + responses: + '200': + description: A successful response. + schema: + type: object + properties: + nocRootCertificates: + type: object + properties: + vid: + type: integer + format: int32 + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: vid + in: path + required: true + type: integer + format: int32 + tags: + - Query + /dcl/pki/proposed-certificates: + get: + operationId: ProposedCertificateAll + responses: + '200': + description: A successful response. + schema: + type: object + properties: + proposedCertificate: + type: array + items: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + pemCert: + type: string + serialNumber: + type: string + owner: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + certSchemaVersion: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/pki/proposed-certificates/{subject}/{subjectKeyId}: + get: + operationId: ProposedCertificate + responses: + '200': + description: A successful response. + schema: + type: object + properties: + proposedCertificate: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + pemCert: + type: string + serialNumber: + type: string + owner: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + certSchemaVersion: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: subject + in: path + required: true + type: string + - name: subjectKeyId + in: path + required: true + type: string + tags: + - Query + /dcl/pki/proposed-revocation-certificates: + get: + operationId: ProposedCertificateRevocationAll + responses: + '200': + description: A successful response. + schema: + type: object + properties: + proposedCertificateRevocation: + type: array + items: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + serialNumber: + type: string + revokeChild: + type: boolean + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/pki/proposed-revocation-certificates/{subject}/{subjectKeyId}: + get: + operationId: ProposedCertificateRevocation + responses: + '200': + description: A successful response. + schema: + type: object + properties: + proposedCertificateRevocation: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + serialNumber: + type: string + revokeChild: + type: boolean + schemaVersion: + type: integer + format: int64 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: subject + in: path + required: true + type: string + - name: subjectKeyId + in: path + required: true + type: string + - name: serialNumber + in: query + required: false + type: string + tags: + - Query + /dcl/pki/rejected-certificates: + get: + operationId: RejectedCertificateAll + responses: + '200': + description: A successful response. 
+ schema: + type: object + properties: + rejectedCertificate: + type: array + items: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/pki/rejected-certificates/{subject}/{subjectKeyId}: + get: + operationId: RejectedCertificate + responses: + '200': + description: A successful response. + schema: + type: object + properties: + rejectedCertificate: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: subject + in: path + required: true + type: string + - name: subjectKeyId + in: path + required: true + type: string + tags: + - Query + /dcl/pki/revocation-points: + get: + operationId: PkiRevocationDistributionPointAll + responses: + '200': + description: A successful response. + schema: + type: object + properties: + PkiRevocationDistributionPoint: + type: array + items: + type: object + properties: + vid: + type: integer + format: int32 + label: + type: string + issuerSubjectKeyID: + type: string + pid: + type: integer + format: int32 + isPAA: + type: boolean + crlSignerCertificate: + type: string + dataURL: + type: string + dataFileSize: + type: string + format: uint64 + dataDigest: + type: string + dataDigestType: + type: integer + format: int64 + revocationType: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/pki/revocation-points/{issuerSubjectKeyID}: + get: + operationId: PkiRevocationDistributionPointsByIssuerSubjectKeyID + responses: + '200': + description: A successful response. + schema: + type: object + properties: + pkiRevocationDistributionPointsByIssuerSubjectKeyID: + type: object + properties: + issuerSubjectKeyID: + type: string + points: + type: array + items: + type: object + properties: + vid: + type: integer + format: int32 + label: + type: string + issuerSubjectKeyID: + type: string + pid: + type: integer + format: int32 + isPAA: + type: boolean + crlSignerCertificate: + type: string + dataURL: + type: string + dataFileSize: + type: string + format: uint64 + dataDigest: + type: string + dataDigestType: + type: integer + format: int64 + revocationType: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: issuerSubjectKeyID + in: path + required: true + type: string + tags: + - Query + /dcl/pki/revocation-points/{issuerSubjectKeyID}/{vid}/{label}: + get: + operationId: PkiRevocationDistributionPoint + responses: + '200': + description: A successful response. + schema: + type: object + properties: + PkiRevocationDistributionPoint: + type: object + properties: + vid: + type: integer + format: int32 + label: + type: string + issuerSubjectKeyID: + type: string + pid: + type: integer + format: int32 + isPAA: + type: boolean + crlSignerCertificate: + type: string + dataURL: + type: string + dataFileSize: + type: string + format: uint64 + dataDigest: + type: string + dataDigestType: + type: integer + format: int64 + revocationType: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: issuerSubjectKeyID + in: path + required: true + type: string + - name: vid + in: path + required: true + type: integer + format: int32 + - name: label + in: path + required: true + type: string + tags: + - Query + /dcl/pki/revoked-certificates: + get: + operationId: RevokedCertificatesAll + responses: + '200': + description: A successful response. 
+ schema: + type: object + properties: + revokedCertificates: + type: array + items: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/pki/revoked-certificates/{subject}/{subjectKeyId}: + get: + operationId: RevokedCertificates + responses: + '200': + description: A successful response. + schema: + type: object + properties: + revokedCertificates: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: subject + in: path + required: true + type: string + - name: subjectKeyId + in: path + required: true + type: string + tags: + - Query + /dcl/pki/revoked-noc-root-certificates: + get: + operationId: RevokedNocRootCertificatesAll + responses: + '200': + description: A successful response. + schema: + type: object + properties: + revokedNocRootCertificates: + type: array + items: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/pki/revoked-noc-root-certificates/{subject}/{subjectKeyId}: + get: + operationId: RevokedNocRootCertificates + responses: + '200': + description: A successful response. + schema: + type: object + properties: + revokedNocRootCertificates: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: subject + in: path + required: true + type: string + - name: subjectKeyId + in: path + required: true + type: string + tags: + - Query + /dcl/pki/revoked-root-certificates: + get: + operationId: RevokedRootCertificates + responses: + '200': + description: A successful response. + schema: + type: object + properties: + revokedRootCertificates: + type: object + properties: + certs: + type: array + items: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + tags: + - Query + /dcl/pki/root-certificates: + get: + operationId: ApprovedRootCertificates + responses: + '200': + description: A successful response. + schema: + type: object + properties: + approvedRootCertificates: + type: object + properties: + certs: + type: array + items: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + tags: + - Query +definitions: + Any: + type: object + properties: + '@type': + type: string + additionalProperties: {} + Status: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + Certificate: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + CertificateIdentifier: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + Grant: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + PageRequest: + type: object + properties: + key: + type: string + format: byte + offset: + type: string + format: uint64 + limit: + type: string + format: uint64 + count_total: + type: boolean + reverse: + type: boolean + PageResponse: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllApprovedCertificatesResponse: + type: object + properties: 
+ approvedCertificates: + type: array + items: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllNocCertificatesResponse: + type: object + properties: + nocCertificates: + type: array + items: + type: object + properties: + vid: + type: integer + format: int32 + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: 
int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllNocRootCertificatesResponse: + type: object + properties: + nocRootCertificates: + type: array + items: + type: object + properties: + vid: + type: integer + format: int32 + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllPkiRevocationDistributionPointResponse: + type: object + properties: + PkiRevocationDistributionPoint: + type: array + items: + type: object + properties: + vid: + type: integer + format: int32 + label: + type: string + issuerSubjectKeyID: + type: string + pid: + type: integer + format: int32 + isPAA: + type: boolean + crlSignerCertificate: + type: string + dataURL: + type: string + dataFileSize: + type: string + format: uint64 + dataDigest: + type: string + dataDigestType: + type: integer + format: int64 + revocationType: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: 
string + format: byte + total: + type: string + format: uint64 + QueryAllProposedCertificateResponse: + type: object + properties: + proposedCertificate: + type: array + items: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + pemCert: + type: string + serialNumber: + type: string + owner: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + certSchemaVersion: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllProposedCertificateRevocationResponse: + type: object + properties: + proposedCertificateRevocation: + type: array + items: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + serialNumber: + type: string + revokeChild: + type: boolean + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllRejectedCertificatesResponse: + type: object + properties: + rejectedCertificate: + type: array + items: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + 
rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllRevokedCertificatesResponse: + type: object + properties: + revokedCertificates: + type: array + items: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllRevokedNocRootCertificatesResponse: + type: object + 
properties: + revokedNocRootCertificates: + type: array + items: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryGetApprovedCertificatesBySubjectResponse: + type: object + properties: + approvedCertificatesBySubject: + type: object + properties: + subject: + type: string + subjectKeyIds: + type: array + items: + type: string + QueryGetApprovedCertificatesResponse: + type: object + properties: + approvedCertificates: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: 
string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + QueryGetApprovedRootCertificatesResponse: + type: object + properties: + approvedRootCertificates: + type: object + properties: + certs: + type: array + items: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + QueryGetChildCertificatesResponse: + type: object + properties: + childCertificates: + type: object + properties: + issuer: + type: string + authorityKeyId: + type: string + certIds: + type: array + items: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + QueryGetNocCertificatesResponse: + type: object + properties: + nocCertificates: + type: object + properties: + vid: + type: integer + format: int32 + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + QueryGetNocRootCertificatesResponse: + type: object + properties: + nocRootCertificates: + type: object + properties: + vid: + type: integer + format: int32 + certs: + type: array + 
items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + QueryGetPkiRevocationDistributionPointResponse: + type: object + properties: + PkiRevocationDistributionPoint: + type: object + properties: + vid: + type: integer + format: int32 + label: + type: string + issuerSubjectKeyID: + type: string + pid: + type: integer + format: int32 + isPAA: + type: boolean + crlSignerCertificate: + type: string + dataURL: + type: string + dataFileSize: + type: string + format: uint64 + dataDigest: + type: string + dataDigestType: + type: integer + format: int64 + revocationType: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDResponse: + type: object + properties: + pkiRevocationDistributionPointsByIssuerSubjectKeyID: + type: object + properties: + issuerSubjectKeyID: + type: string + points: + type: array + items: + type: object + properties: + vid: + type: integer + format: int32 + label: + type: string + issuerSubjectKeyID: + type: string + pid: + type: integer + format: int32 + isPAA: + type: boolean + crlSignerCertificate: + type: string + dataURL: + type: string + dataFileSize: + type: string + format: uint64 + dataDigest: + type: string + dataDigestType: + type: integer + 
format: int64 + revocationType: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + QueryGetProposedCertificateResponse: + type: object + properties: + proposedCertificate: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + pemCert: + type: string + serialNumber: + type: string + owner: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + certSchemaVersion: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + QueryGetProposedCertificateRevocationResponse: + type: object + properties: + proposedCertificateRevocation: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + serialNumber: + type: string + revokeChild: + type: boolean + schemaVersion: + type: integer + format: int64 + QueryGetRejectedCertificatesResponse: + type: object + properties: + rejectedCertificate: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + 
info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + QueryGetRevokedCertificatesResponse: + type: object + properties: + revokedCertificates: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + QueryGetRevokedNocRootCertificatesResponse: + type: object + properties: + revokedNocRootCertificates: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + 
address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + QueryGetRevokedRootCertificatesResponse: + type: object + properties: + revokedRootCertificates: + type: object + properties: + certs: + type: array + items: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + pki.ApprovedCertificates: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + pki.ApprovedCertificatesBySubject: + type: object + properties: + subject: + type: string + subjectKeyIds: + type: array + items: + type: string + pki.ApprovedRootCertificates: + type: object + properties: + certs: + type: array + items: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + pki.ChildCertificates: + type: object + properties: + issuer: + type: string + 
authorityKeyId: + type: string + certIds: + type: array + items: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + pki.NocCertificates: + type: object + properties: + vid: + type: integer + format: int32 + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + pki.NocRootCertificates: + type: object + properties: + vid: + type: integer + format: int32 + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + pki.PkiRevocationDistributionPoint: + type: object + properties: + 
vid: + type: integer + format: int32 + label: + type: string + issuerSubjectKeyID: + type: string + pid: + type: integer + format: int32 + isPAA: + type: boolean + crlSignerCertificate: + type: string + dataURL: + type: string + dataFileSize: + type: string + format: uint64 + dataDigest: + type: string + dataDigestType: + type: integer + format: int64 + revocationType: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + pki.PkiRevocationDistributionPointsByIssuerSubjectKeyID: + type: object + properties: + issuerSubjectKeyID: + type: string + points: + type: array + items: + type: object + properties: + vid: + type: integer + format: int32 + label: + type: string + issuerSubjectKeyID: + type: string + pid: + type: integer + format: int32 + isPAA: + type: boolean + crlSignerCertificate: + type: string + dataURL: + type: string + dataFileSize: + type: string + format: uint64 + dataDigest: + type: string + dataDigestType: + type: integer + format: int64 + revocationType: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + pki.ProposedCertificate: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + pemCert: + type: string + serialNumber: + type: string + owner: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + certSchemaVersion: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + pki.ProposedCertificateRevocation: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: 
string + format: int64 + info: + type: string + subjectAsText: + type: string + serialNumber: + type: string + revokeChild: + type: boolean + schemaVersion: + type: integer + format: int64 + pki.RejectedCertificate: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + pki.RevokedCertificates: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + 
type: boolean + schemaVersion: + type: integer + format: int64 + schemaVersion: + type: integer + format: int64 + pki.RevokedNocRootCertificates: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + certs: + type: array + items: + type: object + properties: + pemCert: + type: string + serialNumber: + type: string + issuer: + type: string + authorityKeyId: + type: string + rootSubject: + type: string + rootSubjectKeyId: + type: string + isRoot: + type: boolean + owner: + type: string + subject: + type: string + subjectKeyId: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + subjectAsText: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + vid: + type: integer + format: int32 + isNoc: + type: boolean + schemaVersion: + type: integer + format: int64 + pki.RevokedRootCertificates: + type: object + properties: + certs: + type: array + items: + type: object + properties: + subject: + type: string + subjectKeyId: + type: string + MsgAddNocX509CertResponse: + type: object + MsgAddNocX509RootCertResponse: + type: object + MsgAddPkiRevocationDistributionPointResponse: + type: object + MsgAddX509CertResponse: + type: object + MsgApproveAddX509RootCertResponse: + type: object + MsgApproveRevokeX509RootCertResponse: + type: object + MsgAssignVidResponse: + type: object + MsgDeletePkiRevocationDistributionPointResponse: + type: object + MsgProposeAddX509RootCertResponse: + type: object + MsgProposeRevokeX509RootCertResponse: + type: object + MsgRejectAddX509RootCertResponse: + type: object + MsgRemoveX509CertResponse: + type: object + MsgRevokeNocRootX509CertResponse: + type: object + MsgRevokeNocX509CertResponse: + type: object + MsgRevokeX509CertResponse: + type: object + 
MsgUpdatePkiRevocationDistributionPointResponse: + type: object diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/index.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/index.ts new file mode 100755 index 000000000..c9dfa159e --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/index.ts @@ -0,0 +1,6 @@ +import Module from './module'; +import { txClient, queryClient, registry } from './module'; +import { msgTypes } from './registry'; + +export * from "./types"; +export { Module, msgTypes, txClient, queryClient, registry }; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/module.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/module.ts new file mode 100755 index 000000000..f0d0e5fbd --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/module.ts @@ -0,0 +1,664 @@ +// Generated by Ignite ignite.com/cli + +import { StdFee } from "@cosmjs/launchpad"; +import { SigningStargateClient, DeliverTxResponse } from "@cosmjs/stargate"; +import { EncodeObject, GeneratedType, OfflineSigner, Registry } from "@cosmjs/proto-signing"; +import { msgTypes } from './registry'; +import { IgniteClient } from "../client" +import { MissingWalletError } from "../helpers" +import { Api } from "./rest"; +import { MsgApproveRevokeX509RootCert } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgRemoveX509Cert } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgRejectAddX509RootCert } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgAddX509Cert } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgAddNocX509IcaCert } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgDeletePkiRevocationDistributionPoint } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgRevokeX509Cert } from 
"./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgRevokeNocX509RootCert } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgApproveAddX509RootCert } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgRevokeNocX509IcaCert } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgProposeRevokeX509RootCert } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgUpdatePkiRevocationDistributionPoint } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgAssignVid } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgAddNocX509RootCert } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgProposeAddX509RootCert } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgAddPkiRevocationDistributionPoint } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; + +import { ApprovedCertificates as typeApprovedCertificates} from "./types" +import { ApprovedCertificatesBySubject as typeApprovedCertificatesBySubject} from "./types" +import { ApprovedCertificatesBySubjectKeyId as typeApprovedCertificatesBySubjectKeyId} from "./types" +import { ApprovedRootCertificates as typeApprovedRootCertificates} from "./types" +import { Certificate as typeCertificate} from "./types" +import { CertificateIdentifier as typeCertificateIdentifier} from "./types" +import { ChildCertificates as typeChildCertificates} from "./types" +import { Grant as typeGrant} from "./types" +import { NocIcaCertificates as typeNocIcaCertificates} from "./types" +import { NocRootCertificates as typeNocRootCertificates} from "./types" +import { NocRootCertificatesByVidAndSkid as typeNocRootCertificatesByVidAndSkid} from "./types" +import { PkiRevocationDistributionPoint as typePkiRevocationDistributionPoint} from "./types" +import { 
PkiRevocationDistributionPointsByIssuerSubjectKeyID as typePkiRevocationDistributionPointsByIssuerSubjectKeyID} from "./types" +import { ProposedCertificate as typeProposedCertificate} from "./types" +import { ProposedCertificateRevocation as typeProposedCertificateRevocation} from "./types" +import { RejectedCertificate as typeRejectedCertificate} from "./types" +import { RevokedCertificates as typeRevokedCertificates} from "./types" +import { RevokedNocRootCertificates as typeRevokedNocRootCertificates} from "./types" +import { RevokedRootCertificates as typeRevokedRootCertificates} from "./types" +import { UniqueCertificate as typeUniqueCertificate} from "./types" + +export { MsgApproveRevokeX509RootCert, MsgRemoveX509Cert, MsgRejectAddX509RootCert, MsgAddX509Cert, MsgAddNocX509IcaCert, MsgDeletePkiRevocationDistributionPoint, MsgRevokeX509Cert, MsgRevokeNocX509RootCert, MsgApproveAddX509RootCert, MsgRevokeNocX509IcaCert, MsgProposeRevokeX509RootCert, MsgUpdatePkiRevocationDistributionPoint, MsgAssignVid, MsgAddNocX509RootCert, MsgProposeAddX509RootCert, MsgAddPkiRevocationDistributionPoint }; + +type sendMsgApproveRevokeX509RootCertParams = { + value: MsgApproveRevokeX509RootCert, + fee?: StdFee, + memo?: string +}; + +type sendMsgRemoveX509CertParams = { + value: MsgRemoveX509Cert, + fee?: StdFee, + memo?: string +}; + +type sendMsgRejectAddX509RootCertParams = { + value: MsgRejectAddX509RootCert, + fee?: StdFee, + memo?: string +}; + +type sendMsgAddX509CertParams = { + value: MsgAddX509Cert, + fee?: StdFee, + memo?: string +}; + +type sendMsgAddNocX509IcaCertParams = { + value: MsgAddNocX509IcaCert, + fee?: StdFee, + memo?: string +}; + +type sendMsgDeletePkiRevocationDistributionPointParams = { + value: MsgDeletePkiRevocationDistributionPoint, + fee?: StdFee, + memo?: string +}; + +type sendMsgRevokeX509CertParams = { + value: MsgRevokeX509Cert, + fee?: StdFee, + memo?: string +}; + +type sendMsgRevokeNocX509RootCertParams = { + value: 
MsgRevokeNocX509RootCert, + fee?: StdFee, + memo?: string +}; + +type sendMsgApproveAddX509RootCertParams = { + value: MsgApproveAddX509RootCert, + fee?: StdFee, + memo?: string +}; + +type sendMsgRevokeNocX509IcaCertParams = { + value: MsgRevokeNocX509IcaCert, + fee?: StdFee, + memo?: string +}; + +type sendMsgProposeRevokeX509RootCertParams = { + value: MsgProposeRevokeX509RootCert, + fee?: StdFee, + memo?: string +}; + +type sendMsgUpdatePkiRevocationDistributionPointParams = { + value: MsgUpdatePkiRevocationDistributionPoint, + fee?: StdFee, + memo?: string +}; + +type sendMsgAssignVidParams = { + value: MsgAssignVid, + fee?: StdFee, + memo?: string +}; + +type sendMsgAddNocX509RootCertParams = { + value: MsgAddNocX509RootCert, + fee?: StdFee, + memo?: string +}; + +type sendMsgProposeAddX509RootCertParams = { + value: MsgProposeAddX509RootCert, + fee?: StdFee, + memo?: string +}; + +type sendMsgAddPkiRevocationDistributionPointParams = { + value: MsgAddPkiRevocationDistributionPoint, + fee?: StdFee, + memo?: string +}; + + +type msgApproveRevokeX509RootCertParams = { + value: MsgApproveRevokeX509RootCert, +}; + +type msgRemoveX509CertParams = { + value: MsgRemoveX509Cert, +}; + +type msgRejectAddX509RootCertParams = { + value: MsgRejectAddX509RootCert, +}; + +type msgAddX509CertParams = { + value: MsgAddX509Cert, +}; + +type msgAddNocX509IcaCertParams = { + value: MsgAddNocX509IcaCert, +}; + +type msgDeletePkiRevocationDistributionPointParams = { + value: MsgDeletePkiRevocationDistributionPoint, +}; + +type msgRevokeX509CertParams = { + value: MsgRevokeX509Cert, +}; + +type msgRevokeNocX509RootCertParams = { + value: MsgRevokeNocX509RootCert, +}; + +type msgApproveAddX509RootCertParams = { + value: MsgApproveAddX509RootCert, +}; + +type msgRevokeNocX509IcaCertParams = { + value: MsgRevokeNocX509IcaCert, +}; + +type msgProposeRevokeX509RootCertParams = { + value: MsgProposeRevokeX509RootCert, +}; + +type msgUpdatePkiRevocationDistributionPointParams = { + 
value: MsgUpdatePkiRevocationDistributionPoint, +}; + +type msgAssignVidParams = { + value: MsgAssignVid, +}; + +type msgAddNocX509RootCertParams = { + value: MsgAddNocX509RootCert, +}; + +type msgProposeAddX509RootCertParams = { + value: MsgProposeAddX509RootCert, +}; + +type msgAddPkiRevocationDistributionPointParams = { + value: MsgAddPkiRevocationDistributionPoint, +}; + + +export const registry = new Registry(msgTypes); + +type Field = { + name: string; + type: unknown; +} +function getStructure(template) { + const structure: {fields: Field[]} = { fields: [] } + for (let [key, value] of Object.entries(template)) { + let field = { name: key, type: typeof value } + structure.fields.push(field) + } + return structure +} +const defaultFee = { + amount: [], + gas: "200000", +}; + +interface TxClientOptions { + addr: string + prefix: string + signer?: OfflineSigner +} + +export const txClient = ({ signer, prefix, addr }: TxClientOptions = { addr: "http://localhost:26657", prefix: "cosmos" }) => { + + return { + + async sendMsgApproveRevokeX509RootCert({ value, fee, memo }: sendMsgApproveRevokeX509RootCertParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgApproveRevokeX509RootCert: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgApproveRevokeX509RootCert({ value: MsgApproveRevokeX509RootCert.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgApproveRevokeX509RootCert: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgRemoveX509Cert({ value, fee, memo }: sendMsgRemoveX509CertParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgRemoveX509Cert: Unable to sign Tx. 
Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgRemoveX509Cert({ value: MsgRemoveX509Cert.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgRemoveX509Cert: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgRejectAddX509RootCert({ value, fee, memo }: sendMsgRejectAddX509RootCertParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgRejectAddX509RootCert: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgRejectAddX509RootCert({ value: MsgRejectAddX509RootCert.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgRejectAddX509RootCert: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgAddX509Cert({ value, fee, memo }: sendMsgAddX509CertParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgAddX509Cert: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgAddX509Cert({ value: MsgAddX509Cert.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? 
fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgAddX509Cert: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgAddNocX509IcaCert({ value, fee, memo }: sendMsgAddNocX509IcaCertParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgAddNocX509IcaCert: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgAddNocX509IcaCert({ value: MsgAddNocX509IcaCert.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgAddNocX509IcaCert: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgDeletePkiRevocationDistributionPoint({ value, fee, memo }: sendMsgDeletePkiRevocationDistributionPointParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgDeletePkiRevocationDistributionPoint: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgDeletePkiRevocationDistributionPoint({ value: MsgDeletePkiRevocationDistributionPoint.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgDeletePkiRevocationDistributionPoint: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgRevokeX509Cert({ value, fee, memo }: sendMsgRevokeX509CertParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgRevokeX509Cert: Unable to sign Tx. 
Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgRevokeX509Cert({ value: MsgRevokeX509Cert.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgRevokeX509Cert: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgRevokeNocX509RootCert({ value, fee, memo }: sendMsgRevokeNocX509RootCertParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgRevokeNocX509RootCert: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgRevokeNocX509RootCert({ value: MsgRevokeNocX509RootCert.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgRevokeNocX509RootCert: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgApproveAddX509RootCert({ value, fee, memo }: sendMsgApproveAddX509RootCertParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgApproveAddX509RootCert: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgApproveAddX509RootCert({ value: MsgApproveAddX509RootCert.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? 
fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgApproveAddX509RootCert: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgRevokeNocX509IcaCert({ value, fee, memo }: sendMsgRevokeNocX509IcaCertParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgRevokeNocX509IcaCert: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgRevokeNocX509IcaCert({ value: MsgRevokeNocX509IcaCert.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgRevokeNocX509IcaCert: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgProposeRevokeX509RootCert({ value, fee, memo }: sendMsgProposeRevokeX509RootCertParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgProposeRevokeX509RootCert: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgProposeRevokeX509RootCert({ value: MsgProposeRevokeX509RootCert.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgProposeRevokeX509RootCert: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgUpdatePkiRevocationDistributionPoint({ value, fee, memo }: sendMsgUpdatePkiRevocationDistributionPointParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgUpdatePkiRevocationDistributionPoint: Unable to sign Tx. 
Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgUpdatePkiRevocationDistributionPoint({ value: MsgUpdatePkiRevocationDistributionPoint.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgUpdatePkiRevocationDistributionPoint: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgAssignVid({ value, fee, memo }: sendMsgAssignVidParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgAssignVid: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgAssignVid({ value: MsgAssignVid.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgAssignVid: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgAddNocX509RootCert({ value, fee, memo }: sendMsgAddNocX509RootCertParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgAddNocX509RootCert: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgAddNocX509RootCert({ value: MsgAddNocX509RootCert.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? 
fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgAddNocX509RootCert: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgProposeAddX509RootCert({ value, fee, memo }: sendMsgProposeAddX509RootCertParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgProposeAddX509RootCert: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgProposeAddX509RootCert({ value: MsgProposeAddX509RootCert.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgProposeAddX509RootCert: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgAddPkiRevocationDistributionPoint({ value, fee, memo }: sendMsgAddPkiRevocationDistributionPointParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgAddPkiRevocationDistributionPoint: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgAddPkiRevocationDistributionPoint({ value: MsgAddPkiRevocationDistributionPoint.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? 
fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgAddPkiRevocationDistributionPoint: Could not broadcast Tx: '+ e.message) + } + }, + + + msgApproveRevokeX509RootCert({ value }: msgApproveRevokeX509RootCertParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.pki.MsgApproveRevokeX509RootCert", value: MsgApproveRevokeX509RootCert.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgApproveRevokeX509RootCert: Could not create message: ' + e.message) + } + }, + + msgRemoveX509Cert({ value }: msgRemoveX509CertParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.pki.MsgRemoveX509Cert", value: MsgRemoveX509Cert.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgRemoveX509Cert: Could not create message: ' + e.message) + } + }, + + msgRejectAddX509RootCert({ value }: msgRejectAddX509RootCertParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.pki.MsgRejectAddX509RootCert", value: MsgRejectAddX509RootCert.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgRejectAddX509RootCert: Could not create message: ' + e.message) + } + }, + + msgAddX509Cert({ value }: msgAddX509CertParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.pki.MsgAddX509Cert", value: MsgAddX509Cert.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgAddX509Cert: Could not create message: ' + e.message) + } + }, + + msgAddNocX509IcaCert({ value }: msgAddNocX509IcaCertParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.pki.MsgAddNocX509IcaCert", value: MsgAddNocX509IcaCert.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgAddNocX509IcaCert: Could not create message: ' + e.message) + } + }, + + msgDeletePkiRevocationDistributionPoint({ value }: 
msgDeletePkiRevocationDistributionPointParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.pki.MsgDeletePkiRevocationDistributionPoint", value: MsgDeletePkiRevocationDistributionPoint.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgDeletePkiRevocationDistributionPoint: Could not create message: ' + e.message) + } + }, + + msgRevokeX509Cert({ value }: msgRevokeX509CertParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.pki.MsgRevokeX509Cert", value: MsgRevokeX509Cert.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgRevokeX509Cert: Could not create message: ' + e.message) + } + }, + + msgRevokeNocX509RootCert({ value }: msgRevokeNocX509RootCertParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.pki.MsgRevokeNocX509RootCert", value: MsgRevokeNocX509RootCert.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgRevokeNocX509RootCert: Could not create message: ' + e.message) + } + }, + + msgApproveAddX509RootCert({ value }: msgApproveAddX509RootCertParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.pki.MsgApproveAddX509RootCert", value: MsgApproveAddX509RootCert.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgApproveAddX509RootCert: Could not create message: ' + e.message) + } + }, + + msgRevokeNocX509IcaCert({ value }: msgRevokeNocX509IcaCertParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.pki.MsgRevokeNocX509IcaCert", value: MsgRevokeNocX509IcaCert.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgRevokeNocX509IcaCert: Could not create message: ' + e.message) + } + }, + + msgProposeRevokeX509RootCert({ value }: msgProposeRevokeX509RootCertParams): EncodeObject { + try { + return { typeUrl: 
"/zigbeealliance.distributedcomplianceledger.pki.MsgProposeRevokeX509RootCert", value: MsgProposeRevokeX509RootCert.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgProposeRevokeX509RootCert: Could not create message: ' + e.message) + } + }, + + msgUpdatePkiRevocationDistributionPoint({ value }: msgUpdatePkiRevocationDistributionPointParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.pki.MsgUpdatePkiRevocationDistributionPoint", value: MsgUpdatePkiRevocationDistributionPoint.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgUpdatePkiRevocationDistributionPoint: Could not create message: ' + e.message) + } + }, + + msgAssignVid({ value }: msgAssignVidParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.pki.MsgAssignVid", value: MsgAssignVid.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgAssignVid: Could not create message: ' + e.message) + } + }, + + msgAddNocX509RootCert({ value }: msgAddNocX509RootCertParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.pki.MsgAddNocX509RootCert", value: MsgAddNocX509RootCert.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgAddNocX509RootCert: Could not create message: ' + e.message) + } + }, + + msgProposeAddX509RootCert({ value }: msgProposeAddX509RootCertParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.pki.MsgProposeAddX509RootCert", value: MsgProposeAddX509RootCert.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgProposeAddX509RootCert: Could not create message: ' + e.message) + } + }, + + msgAddPkiRevocationDistributionPoint({ value }: msgAddPkiRevocationDistributionPointParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.pki.MsgAddPkiRevocationDistributionPoint", value: 
MsgAddPkiRevocationDistributionPoint.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgAddPkiRevocationDistributionPoint: Could not create message: ' + e.message) + } + }, + + } +}; + +interface QueryClientOptions { + addr: string +} + +export const queryClient = ({ addr: addr }: QueryClientOptions = { addr: "http://localhost:1317" }) => { + return new Api({ baseURL: addr }); +}; + +class SDKModule { + public query: ReturnType; + public tx: ReturnType; + public structure: Record; + public registry: Array<[string, GeneratedType]> = []; + + constructor(client: IgniteClient) { + + this.query = queryClient({ addr: client.env.apiURL }); + this.updateTX(client); + this.structure = { + ApprovedCertificates: getStructure(typeApprovedCertificates.fromPartial({})), + ApprovedCertificatesBySubject: getStructure(typeApprovedCertificatesBySubject.fromPartial({})), + ApprovedCertificatesBySubjectKeyId: getStructure(typeApprovedCertificatesBySubjectKeyId.fromPartial({})), + ApprovedRootCertificates: getStructure(typeApprovedRootCertificates.fromPartial({})), + Certificate: getStructure(typeCertificate.fromPartial({})), + CertificateIdentifier: getStructure(typeCertificateIdentifier.fromPartial({})), + ChildCertificates: getStructure(typeChildCertificates.fromPartial({})), + Grant: getStructure(typeGrant.fromPartial({})), + NocIcaCertificates: getStructure(typeNocIcaCertificates.fromPartial({})), + NocRootCertificates: getStructure(typeNocRootCertificates.fromPartial({})), + NocRootCertificatesByVidAndSkid: getStructure(typeNocRootCertificatesByVidAndSkid.fromPartial({})), + PkiRevocationDistributionPoint: getStructure(typePkiRevocationDistributionPoint.fromPartial({})), + PkiRevocationDistributionPointsByIssuerSubjectKeyID: getStructure(typePkiRevocationDistributionPointsByIssuerSubjectKeyID.fromPartial({})), + ProposedCertificate: getStructure(typeProposedCertificate.fromPartial({})), + ProposedCertificateRevocation: 
getStructure(typeProposedCertificateRevocation.fromPartial({})), + RejectedCertificate: getStructure(typeRejectedCertificate.fromPartial({})), + RevokedCertificates: getStructure(typeRevokedCertificates.fromPartial({})), + RevokedNocRootCertificates: getStructure(typeRevokedNocRootCertificates.fromPartial({})), + RevokedRootCertificates: getStructure(typeRevokedRootCertificates.fromPartial({})), + UniqueCertificate: getStructure(typeUniqueCertificate.fromPartial({})), + + }; + client.on('signer-changed',(signer) => { + this.updateTX(client); + }) + } + updateTX(client: IgniteClient) { + const methods = txClient({ + signer: client.signer, + addr: client.env.rpcURL, + prefix: client.env.prefix ?? "cosmos", + }) + + this.tx = methods; + for (let m in methods) { + this.tx[m] = methods[m].bind(this.tx); + } + } +}; + +const Module = (test: IgniteClient) => { + return { + module: { + ZigbeeallianceDistributedcomplianceledgerPki: new SDKModule(test) + }, + registry: msgTypes + } +} +export default Module; \ No newline at end of file diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/registry.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/registry.ts new file mode 100755 index 000000000..d5e359e44 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/registry.ts @@ -0,0 +1,39 @@ +import { GeneratedType } from "@cosmjs/proto-signing"; +import { MsgApproveRevokeX509RootCert } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgRemoveX509Cert } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgRejectAddX509RootCert } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgAddX509Cert } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgAddNocX509IcaCert } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgDeletePkiRevocationDistributionPoint } from 
"./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgRevokeX509Cert } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgRevokeNocX509RootCert } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgApproveAddX509RootCert } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgRevokeNocX509IcaCert } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgProposeRevokeX509RootCert } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgUpdatePkiRevocationDistributionPoint } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgAssignVid } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgAddNocX509RootCert } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgProposeAddX509RootCert } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; +import { MsgAddPkiRevocationDistributionPoint } from "./types/zigbeealliance/distributedcomplianceledger/pki/tx"; + +const msgTypes: Array<[string, GeneratedType]> = [ + ["/zigbeealliance.distributedcomplianceledger.pki.MsgApproveRevokeX509RootCert", MsgApproveRevokeX509RootCert], + ["/zigbeealliance.distributedcomplianceledger.pki.MsgRemoveX509Cert", MsgRemoveX509Cert], + ["/zigbeealliance.distributedcomplianceledger.pki.MsgRejectAddX509RootCert", MsgRejectAddX509RootCert], + ["/zigbeealliance.distributedcomplianceledger.pki.MsgAddX509Cert", MsgAddX509Cert], + ["/zigbeealliance.distributedcomplianceledger.pki.MsgAddNocX509IcaCert", MsgAddNocX509IcaCert], + ["/zigbeealliance.distributedcomplianceledger.pki.MsgDeletePkiRevocationDistributionPoint", MsgDeletePkiRevocationDistributionPoint], + ["/zigbeealliance.distributedcomplianceledger.pki.MsgRevokeX509Cert", MsgRevokeX509Cert], + ["/zigbeealliance.distributedcomplianceledger.pki.MsgRevokeNocX509RootCert", MsgRevokeNocX509RootCert], + 
["/zigbeealliance.distributedcomplianceledger.pki.MsgApproveAddX509RootCert", MsgApproveAddX509RootCert], + ["/zigbeealliance.distributedcomplianceledger.pki.MsgRevokeNocX509IcaCert", MsgRevokeNocX509IcaCert], + ["/zigbeealliance.distributedcomplianceledger.pki.MsgProposeRevokeX509RootCert", MsgProposeRevokeX509RootCert], + ["/zigbeealliance.distributedcomplianceledger.pki.MsgUpdatePkiRevocationDistributionPoint", MsgUpdatePkiRevocationDistributionPoint], + ["/zigbeealliance.distributedcomplianceledger.pki.MsgAssignVid", MsgAssignVid], + ["/zigbeealliance.distributedcomplianceledger.pki.MsgAddNocX509RootCert", MsgAddNocX509RootCert], + ["/zigbeealliance.distributedcomplianceledger.pki.MsgProposeAddX509RootCert", MsgProposeAddX509RootCert], + ["/zigbeealliance.distributedcomplianceledger.pki.MsgAddPkiRevocationDistributionPoint", MsgAddPkiRevocationDistributionPoint], + +]; + +export { msgTypes } \ No newline at end of file diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/rest.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/rest.ts new file mode 100644 index 000000000..976c10243 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/rest.ts @@ -0,0 +1,1109 @@ +/* eslint-disable */ +/* tslint:disable */ +/* + * --------------------------------------------------------------- + * ## THIS FILE WAS GENERATED VIA SWAGGER-TYPESCRIPT-API ## + * ## ## + * ## AUTHOR: acacode ## + * ## SOURCE: https://github.com/acacode/swagger-typescript-api ## + * --------------------------------------------------------------- + */ + +export interface DistributedcomplianceledgerpkiApprovedCertificates { + subject?: string; + subjectKeyId?: string; + certs?: PkiCertificate[]; + + /** @format int64 */ + schemaVersion?: number; +} + +export interface DistributedcomplianceledgerpkiApprovedCertificatesBySubject { + subject?: string; + subjectKeyIds?: string[]; +} + +export interface DistributedcomplianceledgerpkiApprovedRootCertificates { + 
certs?: PkiCertificateIdentifier[]; +} + +export interface DistributedcomplianceledgerpkiChildCertificates { + issuer?: string; + authorityKeyId?: string; + certIds?: PkiCertificateIdentifier[]; +} + +export interface DistributedcomplianceledgerpkiNocIcaCertificates { + /** @format int32 */ + vid?: number; + certs?: PkiCertificate[]; +} + +export interface DistributedcomplianceledgerpkiNocRootCertificates { + /** @format int32 */ + vid?: number; + certs?: PkiCertificate[]; +} + +export interface DistributedcomplianceledgerpkiNocRootCertificatesByVidAndSkid { + /** @format int32 */ + vid?: number; + subjectKeyId?: string; + certs?: PkiCertificate[]; + + /** @format float */ + tq?: number; +} + +export interface DistributedcomplianceledgerpkiPkiRevocationDistributionPoint { + /** @format int32 */ + vid?: number; + label?: string; + issuerSubjectKeyID?: string; + + /** @format int32 */ + pid?: number; + isPAA?: boolean; + crlSignerCertificate?: string; + dataURL?: string; + + /** @format uint64 */ + dataFileSize?: string; + dataDigest?: string; + + /** @format int64 */ + dataDigestType?: number; + + /** @format int64 */ + revocationType?: number; + + /** @format int64 */ + schemaVersion?: number; + crlSignerDelegator?: string; +} + +export interface DistributedcomplianceledgerpkiPkiRevocationDistributionPointsByIssuerSubjectKeyID { + issuerSubjectKeyID?: string; + points?: DistributedcomplianceledgerpkiPkiRevocationDistributionPoint[]; +} + +export interface DistributedcomplianceledgerpkiProposedCertificate { + subject?: string; + subjectKeyId?: string; + pemCert?: string; + serialNumber?: string; + owner?: string; + approvals?: PkiGrant[]; + subjectAsText?: string; + rejects?: PkiGrant[]; + + /** @format int32 */ + vid?: number; + + /** @format int64 */ + certSchemaVersion?: number; + + /** @format int64 */ + schemaVersion?: number; +} + +export interface DistributedcomplianceledgerpkiProposedCertificateRevocation { + subject?: string; + subjectKeyId?: string; + 
approvals?: PkiGrant[]; + subjectAsText?: string; + serialNumber?: string; + revokeChild?: boolean; + + /** @format int64 */ + schemaVersion?: number; +} + +export interface DistributedcomplianceledgerpkiRejectedCertificate { + subject?: string; + subjectKeyId?: string; + certs?: PkiCertificate[]; + + /** @format int64 */ + schemaVersion?: number; +} + +export interface DistributedcomplianceledgerpkiRevokedCertificates { + subject?: string; + subjectKeyId?: string; + certs?: PkiCertificate[]; + + /** @format int64 */ + schemaVersion?: number; +} + +export interface DistributedcomplianceledgerpkiRevokedNocRootCertificates { + subject?: string; + subjectKeyId?: string; + certs?: PkiCertificate[]; +} + +export interface DistributedcomplianceledgerpkiRevokedRootCertificates { + certs?: PkiCertificateIdentifier[]; +} + +export interface PkiCertificate { + pemCert?: string; + serialNumber?: string; + issuer?: string; + authorityKeyId?: string; + rootSubject?: string; + rootSubjectKeyId?: string; + isRoot?: boolean; + owner?: string; + subject?: string; + subjectKeyId?: string; + approvals?: PkiGrant[]; + subjectAsText?: string; + rejects?: PkiGrant[]; + + /** @format int32 */ + vid?: number; + isNoc?: boolean; + + /** @format int64 */ + schemaVersion?: number; +} + +export interface PkiCertificateIdentifier { + subject?: string; + subjectKeyId?: string; +} + +export interface PkiGrant { + address?: string; + + /** + * number of nanoseconds elapsed since January 1, 1970 UTC + * @format int64 + */ + time?: string; + info?: string; +} + +export type PkiMsgAddNocX509IcaCertResponse = object; + +export type PkiMsgAddNocX509RootCertResponse = object; + +export type PkiMsgAddPkiRevocationDistributionPointResponse = object; + +export type PkiMsgAddX509CertResponse = object; + +export type PkiMsgApproveAddX509RootCertResponse = object; + +export type PkiMsgApproveRevokeX509RootCertResponse = object; + +export type PkiMsgAssignVidResponse = object; + +export type 
PkiMsgDeletePkiRevocationDistributionPointResponse = object; + +export type PkiMsgProposeAddX509RootCertResponse = object; + +export type PkiMsgProposeRevokeX509RootCertResponse = object; + +export type PkiMsgRejectAddX509RootCertResponse = object; + +export type PkiMsgRemoveX509CertResponse = object; + +export type PkiMsgRevokeNocX509IcaCertResponse = object; + +export type PkiMsgRevokeNocX509RootCertResponse = object; + +export type PkiMsgRevokeX509CertResponse = object; + +export type PkiMsgUpdatePkiRevocationDistributionPointResponse = object; + +export interface PkiQueryAllApprovedCertificatesResponse { + approvedCertificates?: DistributedcomplianceledgerpkiApprovedCertificates[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface PkiQueryAllNocIcaCertificatesResponse { + nocIcaCertificates?: DistributedcomplianceledgerpkiNocIcaCertificates[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface PkiQueryAllNocRootCertificatesResponse { + nocRootCertificates?: DistributedcomplianceledgerpkiNocRootCertificates[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. 
+ * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface PkiQueryAllPkiRevocationDistributionPointResponse { + PkiRevocationDistributionPoint?: DistributedcomplianceledgerpkiPkiRevocationDistributionPoint[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface PkiQueryAllProposedCertificateResponse { + proposedCertificate?: DistributedcomplianceledgerpkiProposedCertificate[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface PkiQueryAllProposedCertificateRevocationResponse { + proposedCertificateRevocation?: DistributedcomplianceledgerpkiProposedCertificateRevocation[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface PkiQueryAllRejectedCertificatesResponse { + rejectedCertificate?: DistributedcomplianceledgerpkiRejectedCertificate[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. 
+ * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface PkiQueryAllRevokedCertificatesResponse { + revokedCertificates?: DistributedcomplianceledgerpkiRevokedCertificates[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface PkiQueryAllRevokedNocRootCertificatesResponse { + revokedNocRootCertificates?: DistributedcomplianceledgerpkiRevokedNocRootCertificates[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface PkiQueryGetApprovedCertificatesBySubjectResponse { + approvedCertificatesBySubject?: DistributedcomplianceledgerpkiApprovedCertificatesBySubject; +} + +export interface PkiQueryGetApprovedCertificatesResponse { + approvedCertificates?: DistributedcomplianceledgerpkiApprovedCertificates; +} + +export interface PkiQueryGetApprovedRootCertificatesResponse { + approvedRootCertificates?: DistributedcomplianceledgerpkiApprovedRootCertificates; +} + +export interface PkiQueryGetChildCertificatesResponse { + childCertificates?: DistributedcomplianceledgerpkiChildCertificates; +} + +export interface PkiQueryGetNocIcaCertificatesResponse { + nocIcaCertificates?: DistributedcomplianceledgerpkiNocIcaCertificates; +} + +export interface PkiQueryGetNocRootCertificatesByVidAndSkidResponse { + nocRootCertificatesByVidAndSkid?: DistributedcomplianceledgerpkiNocRootCertificatesByVidAndSkid; +} + +export interface PkiQueryGetNocRootCertificatesResponse { + nocRootCertificates?: 
DistributedcomplianceledgerpkiNocRootCertificates; +} + +export interface PkiQueryGetPkiRevocationDistributionPointResponse { + PkiRevocationDistributionPoint?: DistributedcomplianceledgerpkiPkiRevocationDistributionPoint; +} + +export interface PkiQueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDResponse { + pkiRevocationDistributionPointsByIssuerSubjectKeyID?: DistributedcomplianceledgerpkiPkiRevocationDistributionPointsByIssuerSubjectKeyID; +} + +export interface PkiQueryGetProposedCertificateResponse { + proposedCertificate?: DistributedcomplianceledgerpkiProposedCertificate; +} + +export interface PkiQueryGetProposedCertificateRevocationResponse { + proposedCertificateRevocation?: DistributedcomplianceledgerpkiProposedCertificateRevocation; +} + +export interface PkiQueryGetRejectedCertificatesResponse { + rejectedCertificate?: DistributedcomplianceledgerpkiRejectedCertificate; +} + +export interface PkiQueryGetRevokedCertificatesResponse { + revokedCertificates?: DistributedcomplianceledgerpkiRevokedCertificates; +} + +export interface PkiQueryGetRevokedNocRootCertificatesResponse { + revokedNocRootCertificates?: DistributedcomplianceledgerpkiRevokedNocRootCertificates; +} + +export interface PkiQueryGetRevokedRootCertificatesResponse { + revokedRootCertificates?: DistributedcomplianceledgerpkiRevokedRootCertificates; +} + +export interface ProtobufAny { + "@type"?: string; +} + +export interface RpcStatus { + /** @format int32 */ + code?: number; + message?: string; + details?: ProtobufAny[]; +} + +/** +* message SomeRequest { + Foo some_parameter = 1; + PageRequest pagination = 2; + } +*/ +export interface V1Beta1PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. + * @format byte + */ + key?: string; + + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. 
Only one of offset or key should + * be set. + * @format uint64 + */ + offset?: string; + + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. + * @format uint64 + */ + limit?: string; + + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. + */ + count_total?: boolean; + + /** + * reverse is set to true if results are to be returned in the descending order. + * + * Since: cosmos-sdk 0.43 + */ + reverse?: boolean; +} + +/** +* PageResponse is to be embedded in gRPC response messages where the +corresponding request message has used PageRequest. + + message SomeResponse { + repeated Bar results = 1; + PageResponse page = 2; + } +*/ +export interface V1Beta1PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. + * @format byte + */ + next_key?: string; + + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + * @format uint64 + */ + total?: string; +} + +import axios, { AxiosInstance, AxiosRequestConfig, AxiosResponse, ResponseType } from "axios"; + +export type QueryParamsType = Record; + +export interface FullRequestParams extends Omit { + /** set parameter to `true` for call `securityWorker` for this request */ + secure?: boolean; + /** request path */ + path: string; + /** content type of request body */ + type?: ContentType; + /** query params */ + query?: QueryParamsType; + /** format of response (i.e. 
response.json() -> format: "json") */ + format?: ResponseType; + /** request body */ + body?: unknown; +} + +export type RequestParams = Omit; + +export interface ApiConfig extends Omit { + securityWorker?: ( + securityData: SecurityDataType | null, + ) => Promise | AxiosRequestConfig | void; + secure?: boolean; + format?: ResponseType; +} + +export enum ContentType { + Json = "application/json", + FormData = "multipart/form-data", + UrlEncoded = "application/x-www-form-urlencoded", +} + +export class HttpClient { + public instance: AxiosInstance; + private securityData: SecurityDataType | null = null; + private securityWorker?: ApiConfig["securityWorker"]; + private secure?: boolean; + private format?: ResponseType; + + constructor({ securityWorker, secure, format, ...axiosConfig }: ApiConfig = {}) { + this.instance = axios.create({ ...axiosConfig, baseURL: axiosConfig.baseURL || "" }); + this.secure = secure; + this.format = format; + this.securityWorker = securityWorker; + } + + public setSecurityData = (data: SecurityDataType | null) => { + this.securityData = data; + }; + + private mergeRequestParams(params1: AxiosRequestConfig, params2?: AxiosRequestConfig): AxiosRequestConfig { + return { + ...this.instance.defaults, + ...params1, + ...(params2 || {}), + headers: { + ...(this.instance.defaults.headers || {}), + ...(params1.headers || {}), + ...((params2 && params2.headers) || {}), + }, + }; + } + + private createFormData(input: Record): FormData { + return Object.keys(input || {}).reduce((formData, key) => { + const property = input[key]; + formData.append( + key, + property instanceof Blob + ? property + : typeof property === "object" && property !== null + ? JSON.stringify(property) + : `${property}`, + ); + return formData; + }, new FormData()); + } + + public request = async ({ + secure, + path, + type, + query, + format, + body, + ...params + }: FullRequestParams): Promise> => { + const secureParams = + ((typeof secure === "boolean" ? 
secure : this.secure) && + this.securityWorker && + (await this.securityWorker(this.securityData))) || + {}; + const requestParams = this.mergeRequestParams(params, secureParams); + const responseFormat = (format && this.format) || void 0; + + if (type === ContentType.FormData && body && body !== null && typeof body === "object") { + requestParams.headers.common = { Accept: "*/*" }; + requestParams.headers.post = {}; + requestParams.headers.put = {}; + + body = this.createFormData(body as Record); + } + + return this.instance.request({ + ...requestParams, + headers: { + ...(type && type !== ContentType.FormData ? { "Content-Type": type } : {}), + ...(requestParams.headers || {}), + }, + params: query, + responseType: responseFormat, + data: body, + url: path, + }); + }; +} + +/** + * @title zigbeealliance/distributedcomplianceledger/pki/approved_certificates.proto + * @version version not set + */ +export class Api extends HttpClient { + /** + * No description + * + * @tags Query + * @name QueryApprovedCertificatesAll + * @summary Queries a list of ApprovedCertificates items. + * @request GET:/dcl/pki/certificates + */ + queryApprovedCertificatesAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + subjectKeyId?: string; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/pki/certificates`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryApprovedCertificatesBySubject + * @summary Queries a ApprovedCertificatesBySubject by index. 
+ * @request GET:/dcl/pki/certificates/{subject} + */ + queryApprovedCertificatesBySubject = (subject: string, params: RequestParams = {}) => + this.request({ + path: `/dcl/pki/certificates/${subject}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryApprovedCertificates + * @summary Queries a ApprovedCertificates by index. + * @request GET:/dcl/pki/certificates/{subject}/{subjectKeyId} + */ + queryApprovedCertificates = (subject: string, subjectKeyId: string, params: RequestParams = {}) => + this.request({ + path: `/dcl/pki/certificates/${subject}/${subjectKeyId}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryChildCertificates + * @summary Queries a ChildCertificates by index. + * @request GET:/dcl/pki/child-certificates/{issuer}/{authorityKeyId} + */ + queryChildCertificates = (issuer: string, authorityKeyId: string, params: RequestParams = {}) => + this.request({ + path: `/dcl/pki/child-certificates/${issuer}/${authorityKeyId}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryNocIcaCertificatesAll + * @summary Queries a list of NocIcaCertificates items. + * @request GET:/dcl/pki/noc-ica-certificates + */ + queryNocIcaCertificatesAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/pki/noc-ica-certificates`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryNocIcaCertificates + * @summary Queries a NocIcaCertificates by index. 
+ * @request GET:/dcl/pki/noc-ica-certificates/{vid} + */ + queryNocIcaCertificates = (vid: number, params: RequestParams = {}) => + this.request({ + path: `/dcl/pki/noc-ica-certificates/${vid}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryNocRootCertificatesAll + * @summary Queries a list of NocRootCertificates items. + * @request GET:/dcl/pki/noc-root-certificates + */ + queryNocRootCertificatesAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/pki/noc-root-certificates`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryNocRootCertificates + * @summary Queries a NocRootCertificates by index. + * @request GET:/dcl/pki/noc-root-certificates/{vid} + */ + queryNocRootCertificates = (vid: number, params: RequestParams = {}) => + this.request({ + path: `/dcl/pki/noc-root-certificates/${vid}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryNocRootCertificatesByVidAndSkid + * @summary Queries a NocRootCertificatesByVidAndSkid by index. + * @request GET:/dcl/pki/noc-root-certificates/{vid}/{subjectKeyId} + */ + queryNocRootCertificatesByVidAndSkid = (vid: number, subjectKeyId: string, params: RequestParams = {}) => + this.request({ + path: `/dcl/pki/noc-root-certificates/${vid}/${subjectKeyId}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryProposedCertificateAll + * @summary Queries a list of ProposedCertificate items. 
+ * @request GET:/dcl/pki/proposed-certificates + */ + queryProposedCertificateAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/pki/proposed-certificates`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryProposedCertificate + * @summary Queries a ProposedCertificate by index. + * @request GET:/dcl/pki/proposed-certificates/{subject}/{subjectKeyId} + */ + queryProposedCertificate = (subject: string, subjectKeyId: string, params: RequestParams = {}) => + this.request({ + path: `/dcl/pki/proposed-certificates/${subject}/${subjectKeyId}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryProposedCertificateRevocationAll + * @summary Queries a list of ProposedCertificateRevocation items. + * @request GET:/dcl/pki/proposed-revocation-certificates + */ + queryProposedCertificateRevocationAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/pki/proposed-revocation-certificates`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryProposedCertificateRevocation + * @summary Queries a ProposedCertificateRevocation by index. 
+ * @request GET:/dcl/pki/proposed-revocation-certificates/{subject}/{subjectKeyId} + */ + queryProposedCertificateRevocation = ( + subject: string, + subjectKeyId: string, + query?: { serialNumber?: string }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/pki/proposed-revocation-certificates/${subject}/${subjectKeyId}`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryRejectedCertificateAll + * @summary Queries a list of RejectedCertificate items. + * @request GET:/dcl/pki/rejected-certificates + */ + queryRejectedCertificateAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/pki/rejected-certificates`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryRejectedCertificate + * @summary Queries a RejectedCertificate by index. + * @request GET:/dcl/pki/rejected-certificates/{subject}/{subjectKeyId} + */ + queryRejectedCertificate = (subject: string, subjectKeyId: string, params: RequestParams = {}) => + this.request({ + path: `/dcl/pki/rejected-certificates/${subject}/${subjectKeyId}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryPkiRevocationDistributionPointAll + * @summary Queries a list of PkiRevocationDistributionPoint items. 
+ * @request GET:/dcl/pki/revocation-points + */ + queryPkiRevocationDistributionPointAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/pki/revocation-points`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryPkiRevocationDistributionPointsByIssuerSubjectKeyId + * @summary Queries a PkiRevocationDistributionPointsByIssuerSubjectKeyID by index. + * @request GET:/dcl/pki/revocation-points/{issuerSubjectKeyID} + */ + queryPkiRevocationDistributionPointsByIssuerSubjectKeyID = (issuerSubjectKeyId: string, params: RequestParams = {}) => + this.request({ + path: `/dcl/pki/revocation-points/${issuerSubjectKeyId}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryPkiRevocationDistributionPoint + * @summary Queries a PkiRevocationDistributionPoint by index. + * @request GET:/dcl/pki/revocation-points/{issuerSubjectKeyID}/{vid}/{label} + */ + queryPkiRevocationDistributionPoint = ( + issuerSubjectKeyId: string, + vid: number, + label: string, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/pki/revocation-points/${issuerSubjectKeyId}/${vid}/${label}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryRevokedCertificatesAll + * @summary Queries a list of RevokedCertificates items. 
+ * @request GET:/dcl/pki/revoked-certificates + */ + queryRevokedCertificatesAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/pki/revoked-certificates`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryRevokedCertificates + * @summary Queries a RevokedCertificates by index. + * @request GET:/dcl/pki/revoked-certificates/{subject}/{subjectKeyId} + */ + queryRevokedCertificates = (subject: string, subjectKeyId: string, params: RequestParams = {}) => + this.request({ + path: `/dcl/pki/revoked-certificates/${subject}/${subjectKeyId}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryRevokedNocRootCertificatesAll + * @summary Queries a list of RevokedNocRootCertificates items. + * @request GET:/dcl/pki/revoked-noc-root-certificates + */ + queryRevokedNocRootCertificatesAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/pki/revoked-noc-root-certificates`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryRevokedNocRootCertificates + * @summary Queries a RevokedNocRootCertificates by index. 
+ * @request GET:/dcl/pki/revoked-noc-root-certificates/{subject}/{subjectKeyId} + */ + queryRevokedNocRootCertificates = (subject: string, subjectKeyId: string, params: RequestParams = {}) => + this.request({ + path: `/dcl/pki/revoked-noc-root-certificates/${subject}/${subjectKeyId}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryRevokedRootCertificates + * @summary Queries a RevokedRootCertificates by index. + * @request GET:/dcl/pki/revoked-root-certificates + */ + queryRevokedRootCertificates = (params: RequestParams = {}) => + this.request({ + path: `/dcl/pki/revoked-root-certificates`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryApprovedRootCertificates + * @summary Queries a ApprovedRootCertificates by index. + * @request GET:/dcl/pki/root-certificates + */ + queryApprovedRootCertificates = (params: RequestParams = {}) => + this.request({ + path: `/dcl/pki/root-certificates`, + method: "GET", + format: "json", + ...params, + }); +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types.ts new file mode 100755 index 000000000..d1ee67322 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types.ts @@ -0,0 +1,45 @@ +import { ApprovedCertificates } from "./types/zigbeealliance/distributedcomplianceledger/pki/approved_certificates" +import { ApprovedCertificatesBySubject } from "./types/zigbeealliance/distributedcomplianceledger/pki/approved_certificates_by_subject" +import { ApprovedCertificatesBySubjectKeyId } from "./types/zigbeealliance/distributedcomplianceledger/pki/approved_certificates_by_subject_key_id" +import { ApprovedRootCertificates } from "./types/zigbeealliance/distributedcomplianceledger/pki/approved_root_certificates" +import { Certificate } from 
"./types/zigbeealliance/distributedcomplianceledger/pki/certificate" +import { CertificateIdentifier } from "./types/zigbeealliance/distributedcomplianceledger/pki/certificate_identifier" +import { ChildCertificates } from "./types/zigbeealliance/distributedcomplianceledger/pki/child_certificates" +import { Grant } from "./types/zigbeealliance/distributedcomplianceledger/pki/grant" +import { NocIcaCertificates } from "./types/zigbeealliance/distributedcomplianceledger/pki/noc_ica_certificates" +import { NocRootCertificates } from "./types/zigbeealliance/distributedcomplianceledger/pki/noc_root_certificates" +import { NocRootCertificatesByVidAndSkid } from "./types/zigbeealliance/distributedcomplianceledger/pki/noc_root_certificates_by_vid_and_skid" +import { PkiRevocationDistributionPoint } from "./types/zigbeealliance/distributedcomplianceledger/pki/pki_revocation_distribution_point" +import { PkiRevocationDistributionPointsByIssuerSubjectKeyID } from "./types/zigbeealliance/distributedcomplianceledger/pki/pki_revocation_distribution_points_by_issuer_subject_key_id" +import { ProposedCertificate } from "./types/zigbeealliance/distributedcomplianceledger/pki/proposed_certificate" +import { ProposedCertificateRevocation } from "./types/zigbeealliance/distributedcomplianceledger/pki/proposed_certificate_revocation" +import { RejectedCertificate } from "./types/zigbeealliance/distributedcomplianceledger/pki/rejected_certificate" +import { RevokedCertificates } from "./types/zigbeealliance/distributedcomplianceledger/pki/revoked_certificates" +import { RevokedNocRootCertificates } from "./types/zigbeealliance/distributedcomplianceledger/pki/revoked_noc_root_certificates" +import { RevokedRootCertificates } from "./types/zigbeealliance/distributedcomplianceledger/pki/revoked_root_certificates" +import { UniqueCertificate } from "./types/zigbeealliance/distributedcomplianceledger/pki/unique_certificate" + + +export { + ApprovedCertificates, + 
ApprovedCertificatesBySubject, + ApprovedCertificatesBySubjectKeyId, + ApprovedRootCertificates, + Certificate, + CertificateIdentifier, + ChildCertificates, + Grant, + NocIcaCertificates, + NocRootCertificates, + NocRootCertificatesByVidAndSkid, + PkiRevocationDistributionPoint, + PkiRevocationDistributionPointsByIssuerSubjectKeyID, + ProposedCertificate, + ProposedCertificateRevocation, + RejectedCertificate, + RevokedCertificates, + RevokedNocRootCertificates, + RevokedRootCertificates, + UniqueCertificate, + + } \ No newline at end of file diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/cosmos/base/query/v1beta1/pagination.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/cosmos/base/query/v1beta1/pagination.ts new file mode 100644 index 000000000..a31ca249d --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/cosmos/base/query/v1beta1/pagination.ts @@ -0,0 +1,286 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos.base.query.v1beta1"; + +/** + * PageRequest is to be embedded in gRPC request messages for efficient + * pagination. Ex: + * + * message SomeRequest { + * Foo some_parameter = 1; + * PageRequest pagination = 2; + * } + */ +export interface PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. + */ + key: Uint8Array; + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. + */ + offset: number; + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. 
+ */ + limit: number; + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. + */ + countTotal: boolean; + /** + * reverse is set to true if results are to be returned in the descending order. + * + * Since: cosmos-sdk 0.43 + */ + reverse: boolean; +} + +/** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ +export interface PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. + */ + nextKey: Uint8Array; + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + */ + total: number; +} + +function createBasePageRequest(): PageRequest { + return { key: new Uint8Array(), offset: 0, limit: 0, countTotal: false, reverse: false }; +} + +export const PageRequest = { + encode(message: PageRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + if (message.offset !== 0) { + writer.uint32(16).uint64(message.offset); + } + if (message.limit !== 0) { + writer.uint32(24).uint64(message.limit); + } + if (message.countTotal === true) { + writer.uint32(32).bool(message.countTotal); + } + if (message.reverse === true) { + writer.uint32(40).bool(message.reverse); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PageRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePageRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + case 2: + message.offset = longToNumber(reader.uint64() as Long); + break; + case 3: + message.limit = longToNumber(reader.uint64() as Long); + break; + case 4: + message.countTotal = reader.bool(); + break; + case 5: + message.reverse = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PageRequest { + return { + key: isSet(object.key) ? bytesFromBase64(object.key) : new Uint8Array(), + offset: isSet(object.offset) ? Number(object.offset) : 0, + limit: isSet(object.limit) ? Number(object.limit) : 0, + countTotal: isSet(object.countTotal) ? Boolean(object.countTotal) : false, + reverse: isSet(object.reverse) ? Boolean(object.reverse) : false, + }; + }, + + toJSON(message: PageRequest): unknown { + const obj: any = {}; + message.key !== undefined + && (obj.key = base64FromBytes(message.key !== undefined ? message.key : new Uint8Array())); + message.offset !== undefined && (obj.offset = Math.round(message.offset)); + message.limit !== undefined && (obj.limit = Math.round(message.limit)); + message.countTotal !== undefined && (obj.countTotal = message.countTotal); + message.reverse !== undefined && (obj.reverse = message.reverse); + return obj; + }, + + fromPartial, I>>(object: I): PageRequest { + const message = createBasePageRequest(); + message.key = object.key ?? new Uint8Array(); + message.offset = object.offset ?? 0; + message.limit = object.limit ?? 0; + message.countTotal = object.countTotal ?? false; + message.reverse = object.reverse ?? 
false; + return message; + }, +}; + +function createBasePageResponse(): PageResponse { + return { nextKey: new Uint8Array(), total: 0 }; +} + +export const PageResponse = { + encode(message: PageResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.nextKey.length !== 0) { + writer.uint32(10).bytes(message.nextKey); + } + if (message.total !== 0) { + writer.uint32(16).uint64(message.total); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PageResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePageResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.nextKey = reader.bytes(); + break; + case 2: + message.total = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PageResponse { + return { + nextKey: isSet(object.nextKey) ? bytesFromBase64(object.nextKey) : new Uint8Array(), + total: isSet(object.total) ? Number(object.total) : 0, + }; + }, + + toJSON(message: PageResponse): unknown { + const obj: any = {}; + message.nextKey !== undefined + && (obj.nextKey = base64FromBytes(message.nextKey !== undefined ? message.nextKey : new Uint8Array())); + message.total !== undefined && (obj.total = Math.round(message.total)); + return obj; + }, + + fromPartial, I>>(object: I): PageResponse { + const message = createBasePageResponse(); + message.nextKey = object.nextKey ?? new Uint8Array(); + message.total = object.total ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/cosmos_proto/cosmos.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/cosmos_proto/cosmos.ts new file mode 100644 index 000000000..79c8d3486 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/cosmos_proto/cosmos.ts @@ -0,0 +1,247 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos_proto"; + +export enum ScalarType { + SCALAR_TYPE_UNSPECIFIED = 0, + SCALAR_TYPE_STRING = 1, + SCALAR_TYPE_BYTES = 2, + UNRECOGNIZED = -1, +} + +export function scalarTypeFromJSON(object: any): ScalarType { + switch (object) { + case 0: + case "SCALAR_TYPE_UNSPECIFIED": + return ScalarType.SCALAR_TYPE_UNSPECIFIED; + case 1: + case "SCALAR_TYPE_STRING": + return ScalarType.SCALAR_TYPE_STRING; + case 2: + case "SCALAR_TYPE_BYTES": + return ScalarType.SCALAR_TYPE_BYTES; + case -1: + case "UNRECOGNIZED": + default: + return ScalarType.UNRECOGNIZED; + } +} + +export function scalarTypeToJSON(object: ScalarType): string { + switch (object) { + case ScalarType.SCALAR_TYPE_UNSPECIFIED: + return "SCALAR_TYPE_UNSPECIFIED"; + case ScalarType.SCALAR_TYPE_STRING: + return "SCALAR_TYPE_STRING"; + case ScalarType.SCALAR_TYPE_BYTES: + return "SCALAR_TYPE_BYTES"; + case ScalarType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * InterfaceDescriptor describes an interface type to be used with + * accepts_interface and implements_interface and declared by declare_interface. 
+ */ +export interface InterfaceDescriptor { + /** + * name is the name of the interface. It should be a short-name (without + * a period) such that the fully qualified name of the interface will be + * package.name, ex. for the package a.b and interface named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the interface and its + * purpose. + */ + description: string; +} + +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. + * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. + * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ +export interface ScalarDescriptor { + /** + * name is the name of the scalar. It should be a short-name (without + * a period) such that the fully qualified name of the scalar will be + * package.name, ex. for the package a.b and scalar named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the scalar and its + * encoding format. For instance a big integer or decimal scalar should + * specify precisely the expected encoding format. + */ + description: string; + /** + * field_type is the type of field with which this scalar can be used. + * Scalars can be used with one and only one type of field so that + * encoding standards and simple and clear. Currently only string and + * bytes fields are supported for scalars. 
+ */ + fieldType: ScalarType[]; +} + +function createBaseInterfaceDescriptor(): InterfaceDescriptor { + return { name: "", description: "" }; +} + +export const InterfaceDescriptor = { + encode(message: InterfaceDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseInterfaceDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): InterfaceDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + }; + }, + + toJSON(message: InterfaceDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + return obj; + }, + + fromPartial, I>>(object: I): InterfaceDescriptor { + const message = createBaseInterfaceDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? 
""; + return message; + }, +}; + +function createBaseScalarDescriptor(): ScalarDescriptor { + return { name: "", description: "", fieldType: [] }; +} + +export const ScalarDescriptor = { + encode(message: ScalarDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + writer.uint32(26).fork(); + for (const v of message.fieldType) { + writer.int32(v); + } + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ScalarDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseScalarDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.fieldType.push(reader.int32() as any); + } + } else { + message.fieldType.push(reader.int32() as any); + } + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ScalarDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + fieldType: Array.isArray(object?.fieldType) ? 
object.fieldType.map((e: any) => scalarTypeFromJSON(e)) : [], + }; + }, + + toJSON(message: ScalarDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + if (message.fieldType) { + obj.fieldType = message.fieldType.map((e) => scalarTypeToJSON(e)); + } else { + obj.fieldType = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ScalarDescriptor { + const message = createBaseScalarDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? ""; + message.fieldType = object.fieldType?.map((e) => e) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/gogoproto/gogo.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/gogoproto/gogo.ts new file mode 100644 index 000000000..3f41a047a --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/gogoproto/gogo.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "gogoproto"; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/google/api/annotations.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/google/api/annotations.ts new file mode 100644 index 000000000..aace4787f --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/google/api/annotations.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "google.api"; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/google/api/http.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/google/api/http.ts new file mode 100644 index 000000000..17e770d15 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/google/api/http.ts @@ -0,0 +1,589 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.api"; + +/** + * Defines the HTTP configuration for an API service. It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface Http { + /** + * A list of HTTP configuration rules that apply to individual API methods. + * + * **NOTE:** All service configuration rules follow "last one wins" order. 
+ */ + rules: HttpRule[]; + /** + * When set to true, URL path parmeters will be fully URI-decoded except in + * cases of single segment matches in reserved expansion, where "%2F" will be + * left encoded. + * + * The default behavior is to not decode RFC 6570 reserved characters in multi + * segment matches. + */ + fullyDecodeReservedExpansion: boolean; +} + +/** + * `HttpRule` defines the mapping of an RPC method to one or more HTTP + * REST API methods. The mapping specifies how different portions of the RPC + * request message are mapped to URL path, URL query parameters, and + * HTTP request body. The mapping is typically specified as an + * `google.api.http` annotation on the RPC method, + * see "google/api/annotations.proto" for details. + * + * The mapping consists of a field specifying the path template and + * method kind. The path template can refer to fields in the request + * message, as in the example below which describes a REST GET + * operation on a resource collection of messages: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}/{sub.subfield}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * SubMessage sub = 2; // `sub.subfield` is url-mapped + * } + * message Message { + * string text = 1; // content of the resource + * } + * + * The same http annotation can alternatively be expressed inside the + * `GRPC API Configuration` YAML file. + * + * http: + * rules: + * - selector: .Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * This definition enables an automatic, bidrectional mapping of HTTP + * JSON to RPC. 
Example: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456/foo` | `GetMessage(message_id: "123456" sub: SubMessage(subfield: "foo"))` + * + * In general, not only fields but also field paths can be referenced + * from a path pattern. Fields mapped to the path pattern cannot be + * repeated and must have a primitive (non-message) type. + * + * Any fields in the request message which are not bound by the path + * pattern automatically become (optional) HTTP query + * parameters. Assume the following definition of the request message: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * int64 revision = 2; // becomes a parameter + * SubMessage sub = 3; // `sub.subfield` becomes a parameter + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456?revision=2&sub.subfield=foo` | `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: "foo"))` + * + * Note that fields which are mapped to HTTP parameters must have a + * primitive type or a repeated primitive type. Message types are not + * allowed. In the case of a repeated type, the parameter can be + * repeated in the URL, as in `...?param=A¶m=B`. + * + * For HTTP method kinds which allow a request body, the `body` field + * specifies the mapping. 
Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * The following HTTP JSON to RPC mapping is enabled: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice of + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. 
Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC + * mappings: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` + * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: "123456")` + * + * # Rules for HTTP mapping + * + * The rules for mapping HTTP path, query parameters, and body fields + * to the request message are as follows: + * + * 1. The `body` field specifies either `*` or a field path, or is + * omitted. If omitted, it indicates there is no HTTP request body. + * 2. Leaf fields (recursive expansion of nested messages in the + * request) can be classified into three types: + * (a) Matched in the URL template. + * (b) Covered by body (if body is `*`, everything except (a) fields; + * else everything under the body field) + * (c) All other fields. + * 3. URL query parameters found in the HTTP request are mapped to (c) fields. + * 4. Any body sent with an HTTP request can contain only (b) fields. + * + * The syntax of the path template is as follows: + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single path segment. The syntax `**` matches zero + * or more path segments, which must be the last part of the path except the + * `Verb`. The syntax `LITERAL` matches literal text in the path. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. 
A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path, all characters + * except `[-_.~0-9a-zA-Z]` are percent-encoded. Such variables show up in the + * Discovery Document as `{var}`. + * + * If a variable contains one or more path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path, all + * characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. Such variables + * show up in the Discovery Document as `{+var}`. + * + * NOTE: While the single segment variable matches the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 + * Simple String Expansion, the multi segment variable **does not** match + * RFC 6570 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. + * + * NOTE: the field paths in variables and in the `body` must not refer to + * repeated fields or map fields. + */ +export interface HttpRule { + /** + * Selects methods to which this rule applies. + * + * Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + */ + selector: string; + /** Used for listing and getting information about resources. */ + get: + | string + | undefined; + /** Used for updating a resource. */ + put: + | string + | undefined; + /** Used for creating a resource. */ + post: + | string + | undefined; + /** Used for deleting a resource. */ + delete: + | string + | undefined; + /** Used for updating a resource. 
*/ + patch: + | string + | undefined; + /** + * The custom pattern is used for specifying an HTTP method that is not + * included in the `pattern` field, such as HEAD, or "*" to leave the + * HTTP method unspecified for this rule. The wild-card rule is useful + * for services that provide content to Web (HTML) clients. + */ + custom: + | CustomHttpPattern + | undefined; + /** + * The name of the request field whose value is mapped to the HTTP body, or + * `*` for mapping all fields not captured by the path pattern to the HTTP + * body. NOTE: the referred field must not be a repeated field and must be + * present at the top-level of request message type. + */ + body: string; + /** + * Optional. The name of the response field whose value is mapped to the HTTP + * body of response. Other response fields are ignored. When + * not set, the response message will be used as HTTP body of response. + */ + responseBody: string; + /** + * Additional HTTP bindings for the selector. Nested bindings must + * not contain an `additional_bindings` field themselves (that is, + * the nesting may only be one level deep). + */ + additionalBindings: HttpRule[]; +} + +/** A custom pattern is used for defining custom HTTP verb. */ +export interface CustomHttpPattern { + /** The name of this custom HTTP verb. */ + kind: string; + /** The path matched by this custom verb. */ + path: string; +} + +function createBaseHttp(): Http { + return { rules: [], fullyDecodeReservedExpansion: false }; +} + +export const Http = { + encode(message: Http, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rules) { + HttpRule.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.fullyDecodeReservedExpansion === true) { + writer.uint32(16).bool(message.fullyDecodeReservedExpansion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Http { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rules.push(HttpRule.decode(reader, reader.uint32())); + break; + case 2: + message.fullyDecodeReservedExpansion = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Http { + return { + rules: Array.isArray(object?.rules) ? object.rules.map((e: any) => HttpRule.fromJSON(e)) : [], + fullyDecodeReservedExpansion: isSet(object.fullyDecodeReservedExpansion) + ? Boolean(object.fullyDecodeReservedExpansion) + : false, + }; + }, + + toJSON(message: Http): unknown { + const obj: any = {}; + if (message.rules) { + obj.rules = message.rules.map((e) => e ? HttpRule.toJSON(e) : undefined); + } else { + obj.rules = []; + } + message.fullyDecodeReservedExpansion !== undefined + && (obj.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion); + return obj; + }, + + fromPartial, I>>(object: I): Http { + const message = createBaseHttp(); + message.rules = object.rules?.map((e) => HttpRule.fromPartial(e)) || []; + message.fullyDecodeReservedExpansion = object.fullyDecodeReservedExpansion ?? 
false; + return message; + }, +}; + +function createBaseHttpRule(): HttpRule { + return { + selector: "", + get: undefined, + put: undefined, + post: undefined, + delete: undefined, + patch: undefined, + custom: undefined, + body: "", + responseBody: "", + additionalBindings: [], + }; +} + +export const HttpRule = { + encode(message: HttpRule, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.selector !== "") { + writer.uint32(10).string(message.selector); + } + if (message.get !== undefined) { + writer.uint32(18).string(message.get); + } + if (message.put !== undefined) { + writer.uint32(26).string(message.put); + } + if (message.post !== undefined) { + writer.uint32(34).string(message.post); + } + if (message.delete !== undefined) { + writer.uint32(42).string(message.delete); + } + if (message.patch !== undefined) { + writer.uint32(50).string(message.patch); + } + if (message.custom !== undefined) { + CustomHttpPattern.encode(message.custom, writer.uint32(66).fork()).ldelim(); + } + if (message.body !== "") { + writer.uint32(58).string(message.body); + } + if (message.responseBody !== "") { + writer.uint32(98).string(message.responseBody); + } + for (const v of message.additionalBindings) { + HttpRule.encode(v!, writer.uint32(90).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): HttpRule { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseHttpRule(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.selector = reader.string(); + break; + case 2: + message.get = reader.string(); + break; + case 3: + message.put = reader.string(); + break; + case 4: + message.post = reader.string(); + break; + case 5: + message.delete = reader.string(); + break; + case 6: + message.patch = reader.string(); + break; + case 8: + message.custom = CustomHttpPattern.decode(reader, reader.uint32()); + break; + case 7: + message.body = reader.string(); + break; + case 12: + message.responseBody = reader.string(); + break; + case 11: + message.additionalBindings.push(HttpRule.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): HttpRule { + return { + selector: isSet(object.selector) ? String(object.selector) : "", + get: isSet(object.get) ? String(object.get) : undefined, + put: isSet(object.put) ? String(object.put) : undefined, + post: isSet(object.post) ? String(object.post) : undefined, + delete: isSet(object.delete) ? String(object.delete) : undefined, + patch: isSet(object.patch) ? String(object.patch) : undefined, + custom: isSet(object.custom) ? CustomHttpPattern.fromJSON(object.custom) : undefined, + body: isSet(object.body) ? String(object.body) : "", + responseBody: isSet(object.responseBody) ? String(object.responseBody) : "", + additionalBindings: Array.isArray(object?.additionalBindings) + ? 
object.additionalBindings.map((e: any) => HttpRule.fromJSON(e)) + : [], + }; + }, + + toJSON(message: HttpRule): unknown { + const obj: any = {}; + message.selector !== undefined && (obj.selector = message.selector); + message.get !== undefined && (obj.get = message.get); + message.put !== undefined && (obj.put = message.put); + message.post !== undefined && (obj.post = message.post); + message.delete !== undefined && (obj.delete = message.delete); + message.patch !== undefined && (obj.patch = message.patch); + message.custom !== undefined + && (obj.custom = message.custom ? CustomHttpPattern.toJSON(message.custom) : undefined); + message.body !== undefined && (obj.body = message.body); + message.responseBody !== undefined && (obj.responseBody = message.responseBody); + if (message.additionalBindings) { + obj.additionalBindings = message.additionalBindings.map((e) => e ? HttpRule.toJSON(e) : undefined); + } else { + obj.additionalBindings = []; + } + return obj; + }, + + fromPartial, I>>(object: I): HttpRule { + const message = createBaseHttpRule(); + message.selector = object.selector ?? ""; + message.get = object.get ?? undefined; + message.put = object.put ?? undefined; + message.post = object.post ?? undefined; + message.delete = object.delete ?? undefined; + message.patch = object.patch ?? undefined; + message.custom = (object.custom !== undefined && object.custom !== null) + ? CustomHttpPattern.fromPartial(object.custom) + : undefined; + message.body = object.body ?? ""; + message.responseBody = object.responseBody ?? 
""; + message.additionalBindings = object.additionalBindings?.map((e) => HttpRule.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseCustomHttpPattern(): CustomHttpPattern { + return { kind: "", path: "" }; +} + +export const CustomHttpPattern = { + encode(message: CustomHttpPattern, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.kind !== "") { + writer.uint32(10).string(message.kind); + } + if (message.path !== "") { + writer.uint32(18).string(message.path); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CustomHttpPattern { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCustomHttpPattern(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.kind = reader.string(); + break; + case 2: + message.path = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): CustomHttpPattern { + return { kind: isSet(object.kind) ? String(object.kind) : "", path: isSet(object.path) ? String(object.path) : "" }; + }, + + toJSON(message: CustomHttpPattern): unknown { + const obj: any = {}; + message.kind !== undefined && (obj.kind = message.kind); + message.path !== undefined && (obj.path = message.path); + return obj; + }, + + fromPartial, I>>(object: I): CustomHttpPattern { + const message = createBaseCustomHttpPattern(); + message.kind = object.kind ?? ""; + message.path = object.path ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? 
keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/google/protobuf/descriptor.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/google/protobuf/descriptor.ts new file mode 100644 index 000000000..8fb7bb24c --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/google/protobuf/descriptor.ts @@ -0,0 +1,3753 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSet { + file: FileDescriptorProto[]; +} + +/** Describes a complete .proto file. */ +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name: string; + /** e.g. "foo", "foo.bar", etc. */ + package: string; + /** Names of files imported by this file. */ + dependency: string[]; + /** Indexes of the public imported files in the dependency list above. */ + publicDependency: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weakDependency: number[]; + /** All top-level definitions in this file. */ + messageType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + service: ServiceDescriptorProto[]; + extension: FieldDescriptorProto[]; + options: + | FileOptions + | undefined; + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. 
+ */ + sourceCodeInfo: + | SourceCodeInfo + | undefined; + /** + * The syntax of the proto file. + * The supported values are "proto2" and "proto3". + */ + syntax: string; +} + +/** Describes a message type. */ +export interface DescriptorProto { + name: string; + field: FieldDescriptorProto[]; + extension: FieldDescriptorProto[]; + nestedType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + extensionRange: DescriptorProto_ExtensionRange[]; + oneofDecl: OneofDescriptorProto[]; + options: MessageOptions | undefined; + reservedRange: DescriptorProto_ReservedRange[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reservedName: string[]; +} + +export interface DescriptorProto_ExtensionRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; + options: ExtensionRangeOptions | undefined; +} + +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; +} + +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Describes a field within a message. */ +export interface FieldDescriptorProto { + name: string; + number: number; + label: FieldDescriptorProto_Label; + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type: FieldDescriptorProto_Type; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. 
first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). + */ + typeName: string; + /** + * For extensions, this is the name of the type being extended. It is + * resolved in the same manner as type_name. + */ + extendee: string; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + * TODO(kenton): Base-64 encode? + */ + defaultValue: string; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneofIndex: number; + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + jsonName: string; + options: + | FieldOptions + | undefined; + /** + * If true, this is a proto3 "optional". When a proto3 field is optional, it + * tracks presence regardless of field type. + * + * When proto3_optional is true, this field must be belong to a oneof to + * signal to old proto3 clients that presence is tracked for this field. This + * oneof is known as a "synthetic" oneof, and this field must be its sole + * member (each proto3 optional field gets its own synthetic oneof). Synthetic + * oneofs exist in the descriptor only, and do not generate any API. Synthetic + * oneofs must be ordered after all "real" oneofs. + * + * For message fields, proto3_optional doesn't create any semantic change, + * since non-repeated message fields always track presence. However it still + * indicates the semantic detail of whether the user wrote "optional" or not. + * This can be useful for round-tripping the .proto file. 
For consistency we + * give message fields a synthetic oneof also, even though it is not required + * to track presence. This is especially important because the parser can't + * tell if a field is a message or an enum, so it must always create a + * synthetic oneof. + * + * Proto2 optional fields do not set this flag, because they already indicate + * optional with `LABEL_OPTIONAL`. + */ + proto3Optional: boolean; +} + +export enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + TYPE_GROUP = 10, + /** TYPE_MESSAGE - Length-delimited aggregate. */ + TYPE_MESSAGE = 11, + /** TYPE_BYTES - New in version 2. */ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + /** TYPE_SINT64 - Uses ZigZag encoding. 
*/ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + return FieldDescriptorProto_Type.TYPE_SINT64; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Type.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case 
FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + case FieldDescriptorProto_Type.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Label.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string { + switch (object) { + case 
FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + case FieldDescriptorProto_Label.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** Describes a oneof. */ +export interface OneofDescriptorProto { + name: string; + options: OneofOptions | undefined; +} + +/** Describes an enum type. */ +export interface EnumDescriptorProto { + name: string; + value: EnumValueDescriptorProto[]; + options: + | EnumOptions + | undefined; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reservedRange: EnumDescriptorProto_EnumReservedRange[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reservedName: string[]; +} + +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start: number; + /** Inclusive. */ + end: number; +} + +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProto { + name: string; + number: number; + options: EnumValueOptions | undefined; +} + +/** Describes a service. */ +export interface ServiceDescriptorProto { + name: string; + method: MethodDescriptorProto[]; + options: ServiceOptions | undefined; +} + +/** Describes a method of a service. */ +export interface MethodDescriptorProto { + name: string; + /** + * Input and output type names. 
These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + inputType: string; + outputType: string; + options: + | MethodOptions + | undefined; + /** Identifies if client streams multiple client messages */ + clientStreaming: boolean; + /** Identifies if server streams multiple server messages */ + serverStreaming: boolean; +} + +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage: string; + /** + * Controls the name of the wrapper Java class generated for the .proto file. + * That class will always contain the .proto file's getDescriptor() method as + * well as any top-level extensions defined in the .proto file. + * If java_multiple_files is disabled, then all the other classes from the + * .proto file will be nested inside the single wrapper outer class. + */ + javaOuterClassname: string; + /** + * If enabled, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the wrapper class + * named by java_outer_classname. However, the wrapper class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + javaMultipleFiles: boolean; + /** + * This option does nothing. + * + * @deprecated + */ + javaGenerateEqualsAndHash: boolean; + /** + * If set true, then the Java2 code generator will generate code that + * throws an exception whenever an attempt is made to assign a non-UTF-8 + * byte sequence to a string field. + * Message reflection will do the same. + * However, an extension field still accepts non-UTF-8 byte sequences. 
+ * This option has no effect on when used with the lite runtime. + */ + javaStringCheckUtf8: boolean; + optimizeFor: FileOptions_OptimizeMode; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + goPackage: string; + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + ccGenericServices: boolean; + javaGenericServices: boolean; + pyGenericServices: boolean; + phpGenericServices: boolean; + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated: boolean; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + ccEnableArenas: boolean; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. + */ + objcClassPrefix: string; + /** Namespace for generated classes; defaults to the package. 
*/ + csharpNamespace: string; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swiftPrefix: string; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + phpClassPrefix: string; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. When this option is empty, the package name will be used for + * determining the namespace. + */ + phpNamespace: string; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + phpMetadataNamespace: string; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + rubyPackage: string; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Generated classes can be optimized for speed or code size. */ +export enum FileOptions_OptimizeMode { + /** SPEED - Generate complete code for parsing, serialization, */ + SPEED = 1, + /** CODE_SIZE - etc. */ + CODE_SIZE = 2, + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. 
*/ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1, +} + +export function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + case -1: + case "UNRECOGNIZED": + default: + return FileOptions_OptimizeMode.UNRECOGNIZED; + } +} + +export function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + case FileOptions_OptimizeMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + messageSetWireFormat: boolean; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". 
+ */ + noStandardDescriptorAccessor: boolean; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated: boolean; + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + mapEntry: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface FieldOptions { + /** + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is not yet implemented in the open source + * release -- sorry, we'll try to include it in a future version! + */ + ctype: FieldOptions_CType; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. 
+ */ + packed: boolean; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. + * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype: FieldOptions_JSType; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * Note that implementations may choose not to check required fields within + * a lazy sub-message. That is, calling IsInitialized() on the outer message + * may return true even if the inner message has missing required fields. 
+ * This is necessary because otherwise the inner message would have to be + * parsed in order to perform the check, defeating the purpose of lazy + * parsing. An implementation which chooses not to check required fields + * must be consistent about it. That is, for any particular sub-message, the + * implementation must either *always* check its required fields, or *never* + * check its required fields, regardless of whether or not the message has + * been parsed. + */ + lazy: boolean; + /** + * Is this field deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated: boolean; + /** For Google-internal migration only. Do not use. */ + weak: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export enum FieldOptions_CType { + /** STRING - Default mode. */ + STRING = 0, + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_CType.UNRECOGNIZED; + } +} + +export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + case FieldOptions_CType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. */ + JS_NORMAL = 0, + /** JS_STRING - Use JavaScript strings. 
*/ + JS_STRING = 1, + /** JS_NUMBER - Use JavaScript numbers. */ + JS_NUMBER = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_JSType.UNRECOGNIZED; + } +} + +export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + case FieldOptions_JSType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface OneofOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias: boolean; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumValueOptions { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. 
*/ + uninterpretedOption: UninterpretedOption[]; +} + +export interface ServiceOptions { + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated: boolean; + idempotencyLevel: MethodOptions_IdempotencyLevel; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
+ */ +export enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1, +} + +export function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + case -1: + case "UNRECOGNIZED": + default: + return MethodOptions_IdempotencyLevel.UNRECOGNIZED; + } +} + +export function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + case MethodOptions_IdempotencyLevel.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. 
+ */ + identifierValue: string; + positiveIntValue: number; + negativeIntValue: number; + doubleValue: number; + stringValue: Uint8Array; + aggregateValue: string; +} + +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePart { + namePart: string; + isExtension: boolean; +} + +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. 
This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[]; +} + +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition. For + * example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). 
+ */ + path: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. + * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to qux. + * // + * // Another line attached to qux. + * optional double qux = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to qux or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. * / + * /* Block comment attached to + * * grault. 
* / + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leadingComments: string; + trailingComments: string; + leadingDetachedComments: string[]; +} + +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. + */ + annotation: GeneratedCodeInfo_Annotation[]; +} + +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[]; + /** Identifies the filesystem path to the original source .proto. */ + sourceFile: string; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin: number; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified offset. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end: number; +} + +function createBaseFileDescriptorSet(): FileDescriptorSet { + return { file: [] }; +} + +export const FileDescriptorSet = { + encode(message: FileDescriptorSet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.file) { + FileDescriptorProto.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorSet { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.file.push(FileDescriptorProto.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorSet { + return { file: Array.isArray(object?.file) ? object.file.map((e: any) => FileDescriptorProto.fromJSON(e)) : [] }; + }, + + toJSON(message: FileDescriptorSet): unknown { + const obj: any = {}; + if (message.file) { + obj.file = message.file.map((e) => e ? FileDescriptorProto.toJSON(e) : undefined); + } else { + obj.file = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorSet { + const message = createBaseFileDescriptorSet(); + message.file = object.file?.map((e) => FileDescriptorProto.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFileDescriptorProto(): FileDescriptorProto { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + }; +} + +export const FileDescriptorProto = { + encode(message: FileDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.package !== "") { + writer.uint32(18).string(message.package); + } + for (const v of message.dependency) { + writer.uint32(26).string(v!); + } + writer.uint32(82).fork(); + for (const v of message.publicDependency) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(90).fork(); + for (const v of message.weakDependency) { + writer.int32(v); + } + writer.ldelim(); + for (const v of message.messageType) { + DescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of 
message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const v of message.service) { + ServiceDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(58).fork()).ldelim(); + } + if (message.options !== undefined) { + FileOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.sourceCodeInfo !== undefined) { + SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(74).fork()).ldelim(); + } + if (message.syntax !== "") { + writer.uint32(98).string(message.syntax); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFileDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.package = reader.string(); + break; + case 3: + message.dependency.push(reader.string()); + break; + case 10: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.publicDependency.push(reader.int32()); + } + } else { + message.publicDependency.push(reader.int32()); + } + break; + case 11: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.weakDependency.push(reader.int32()); + } + } else { + message.weakDependency.push(reader.int32()); + } + break; + case 4: + message.messageType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.service.push(ServiceDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + 
message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 8: + message.options = FileOptions.decode(reader, reader.uint32()); + break; + case 9: + message.sourceCodeInfo = SourceCodeInfo.decode(reader, reader.uint32()); + break; + case 12: + message.syntax = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + package: isSet(object.package) ? String(object.package) : "", + dependency: Array.isArray(object?.dependency) ? object.dependency.map((e: any) => String(e)) : [], + publicDependency: Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e: any) => Number(e)) + : [], + weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e: any) => Number(e)) : [], + messageType: Array.isArray(object?.messageType) + ? object.messageType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + service: Array.isArray(object?.service) ? object.service.map((e: any) => ServiceDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) ? SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, + syntax: isSet(object.syntax) ? 
String(object.syntax) : "", + }; + }, + + toJSON(message: FileDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.package !== undefined && (obj.package = message.package); + if (message.dependency) { + obj.dependency = message.dependency.map((e) => e); + } else { + obj.dependency = []; + } + if (message.publicDependency) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } else { + obj.publicDependency = []; + } + if (message.weakDependency) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } else { + obj.weakDependency = []; + } + if (message.messageType) { + obj.messageType = message.messageType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.messageType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.service) { + obj.service = message.service.map((e) => e ? ServiceDescriptorProto.toJSON(e) : undefined); + } else { + obj.service = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + message.options !== undefined && (obj.options = message.options ? FileOptions.toJSON(message.options) : undefined); + message.sourceCodeInfo !== undefined + && (obj.sourceCodeInfo = message.sourceCodeInfo ? SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined); + message.syntax !== undefined && (obj.syntax = message.syntax); + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorProto { + const message = createBaseFileDescriptorProto(); + message.name = object.name ?? ""; + message.package = object.package ?? 
""; + message.dependency = object.dependency?.map((e) => e) || []; + message.publicDependency = object.publicDependency?.map((e) => e) || []; + message.weakDependency = object.weakDependency?.map((e) => e) || []; + message.messageType = object.messageType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.service = object.service?.map((e) => ServiceDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? FileOptions.fromPartial(object.options) + : undefined; + message.sourceCodeInfo = (object.sourceCodeInfo !== undefined && object.sourceCodeInfo !== null) + ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) + : undefined; + message.syntax = object.syntax ?? ""; + return message; + }, +}; + +function createBaseDescriptorProto(): DescriptorProto { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + }; +} + +export const DescriptorProto = { + encode(message: DescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.field) { + FieldDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.nestedType) { + DescriptorProto.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.extensionRange) { + DescriptorProto_ExtensionRange.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const 
v of message.oneofDecl) { + OneofDescriptorProto.encode(v!, writer.uint32(66).fork()).ldelim(); + } + if (message.options !== undefined) { + MessageOptions.encode(message.options, writer.uint32(58).fork()).ldelim(); + } + for (const v of message.reservedRange) { + DescriptorProto_ReservedRange.encode(v!, writer.uint32(74).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(82).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.field.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.nestedType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 4: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.extensionRange.push(DescriptorProto_ExtensionRange.decode(reader, reader.uint32())); + break; + case 8: + message.oneofDecl.push(OneofDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + message.options = MessageOptions.decode(reader, reader.uint32()); + break; + case 9: + message.reservedRange.push(DescriptorProto_ReservedRange.decode(reader, reader.uint32())); + break; + case 10: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + field: Array.isArray(object?.field) ? 
object.field.map((e: any) => FieldDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: Array.isArray(object?.nestedType) + ? object.nestedType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + extensionRange: Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e: any) => DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e: any) => OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? MessageOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: DescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.field) { + obj.field = message.field.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.field = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + if (message.nestedType) { + obj.nestedType = message.nestedType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.nestedType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.extensionRange) { + obj.extensionRange = message.extensionRange.map((e) => e ? 
DescriptorProto_ExtensionRange.toJSON(e) : undefined); + } else { + obj.extensionRange = []; + } + if (message.oneofDecl) { + obj.oneofDecl = message.oneofDecl.map((e) => e ? OneofDescriptorProto.toJSON(e) : undefined); + } else { + obj.oneofDecl = []; + } + message.options !== undefined + && (obj.options = message.options ? MessageOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => e ? DescriptorProto_ReservedRange.toJSON(e) : undefined); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): DescriptorProto { + const message = createBaseDescriptorProto(); + message.name = object.name ?? ""; + message.field = object.field?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.nestedType = object.nestedType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.extensionRange = object.extensionRange?.map((e) => DescriptorProto_ExtensionRange.fromPartial(e)) || []; + message.oneofDecl = object.oneofDecl?.map((e) => OneofDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
MessageOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => DescriptorProto_ReservedRange.fromPartial(e)) || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseDescriptorProto_ExtensionRange(): DescriptorProto_ExtensionRange { + return { start: 0, end: 0, options: undefined }; +} + +export const DescriptorProto_ExtensionRange = { + encode(message: DescriptorProto_ExtensionRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + if (message.options !== undefined) { + ExtensionRangeOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ExtensionRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ExtensionRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + case 3: + message.options = ExtensionRangeOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ExtensionRange { + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + options: isSet(object.options) ? 
ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: DescriptorProto_ExtensionRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + message.options !== undefined + && (obj.options = message.options ? ExtensionRangeOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ExtensionRange { + const message = createBaseDescriptorProto_ExtensionRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? ExtensionRangeOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseDescriptorProto_ReservedRange(): DescriptorProto_ReservedRange { + return { start: 0, end: 0 }; +} + +export const DescriptorProto_ReservedRange = { + encode(message: DescriptorProto_ReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? 
Number(object.end) : 0 }; + }, + + toJSON(message: DescriptorProto_ReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ReservedRange { + const message = createBaseDescriptorProto_ReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + return message; + }, +}; + +function createBaseExtensionRangeOptions(): ExtensionRangeOptions { + return { uninterpretedOption: [] }; +} + +export const ExtensionRangeOptions = { + encode(message: ExtensionRangeOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ExtensionRangeOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ExtensionRangeOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} + +export const FieldDescriptorProto = { + encode(message: FieldDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(24).int32(message.number); + } + if (message.label !== 1) { + writer.uint32(32).int32(message.label); + } + if (message.type !== 1) { + writer.uint32(40).int32(message.type); + } + if (message.typeName !== "") { + writer.uint32(50).string(message.typeName); + } + if (message.extendee !== "") { + writer.uint32(18).string(message.extendee); + } + if (message.defaultValue !== "") { + writer.uint32(58).string(message.defaultValue); + } + if (message.oneofIndex !== 0) { + writer.uint32(72).int32(message.oneofIndex); + } + if (message.jsonName !== "") { + writer.uint32(82).string(message.jsonName); + } + if (message.options !== undefined) { + FieldOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.proto3Optional === true) { + writer.uint32(136).bool(message.proto3Optional); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 3: + message.number = reader.int32(); + break; + case 4: + message.label = reader.int32() as any; + break; + case 5: + message.type = reader.int32() as any; + break; + case 6: + message.typeName = reader.string(); + break; + case 2: + message.extendee = reader.string(); + break; + case 7: + message.defaultValue = reader.string(); + break; + case 9: + message.oneofIndex = reader.int32(); + break; + case 10: + message.jsonName = reader.string(); + break; + case 8: + message.options = FieldOptions.decode(reader, reader.uint32()); + break; + case 17: + message.proto3Optional = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) ? String(object.typeName) : "", + extendee: isSet(object.extendee) ? String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "", + oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? String(object.jsonName) : "", + options: isSet(object.options) ? FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) ? 
Boolean(object.proto3Optional) : false, + }; + }, + + toJSON(message: FieldDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label)); + message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type)); + message.typeName !== undefined && (obj.typeName = message.typeName); + message.extendee !== undefined && (obj.extendee = message.extendee); + message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue); + message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex)); + message.jsonName !== undefined && (obj.jsonName = message.jsonName); + message.options !== undefined && (obj.options = message.options ? FieldOptions.toJSON(message.options) : undefined); + message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional); + return obj; + }, + + fromPartial, I>>(object: I): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.label = object.label ?? 1; + message.type = object.type ?? 1; + message.typeName = object.typeName ?? ""; + message.extendee = object.extendee ?? ""; + message.defaultValue = object.defaultValue ?? ""; + message.oneofIndex = object.oneofIndex ?? 0; + message.jsonName = object.jsonName ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? FieldOptions.fromPartial(object.options) + : undefined; + message.proto3Optional = object.proto3Optional ?? 
false; + return message; + }, +}; + +function createBaseOneofDescriptorProto(): OneofDescriptorProto { + return { name: "", options: undefined }; +} + +export const OneofDescriptorProto = { + encode(message: OneofDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.options !== undefined) { + OneofOptions.encode(message.options, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.options = OneofOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + options: isSet(object.options) ? OneofOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: OneofDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.options !== undefined && (obj.options = message.options ? OneofOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto(); + message.name = object.name ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? 
OneofOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseEnumDescriptorProto(): EnumDescriptorProto { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] }; +} + +export const EnumDescriptorProto = { + encode(message: EnumDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.value) { + EnumValueDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + EnumOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.reservedRange) { + EnumDescriptorProto_EnumReservedRange.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(42).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.value.push(EnumValueDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = EnumOptions.decode(reader, reader.uint32()); + break; + case 4: + message.reservedRange.push(EnumDescriptorProto_EnumReservedRange.decode(reader, reader.uint32())); + break; + case 5: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + value: Array.isArray(object?.value) ? 
object.value.map((e: any) => EnumValueDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? EnumOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: EnumDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.value) { + obj.value = message.value.map((e) => e ? EnumValueDescriptorProto.toJSON(e) : undefined); + } else { + obj.value = []; + } + message.options !== undefined && (obj.options = message.options ? EnumOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => + e ? EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined + ); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto(); + message.name = object.name ?? ""; + message.value = object.value?.map((e) => EnumValueDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
EnumOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => EnumDescriptorProto_EnumReservedRange.fromPartial(e)) + || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseEnumDescriptorProto_EnumReservedRange(): EnumDescriptorProto_EnumReservedRange { + return { start: 0, end: 0 }; +} + +export const EnumDescriptorProto_EnumReservedRange = { + encode(message: EnumDescriptorProto_EnumReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto_EnumReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto_EnumReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + + toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +function createBaseEnumValueDescriptorProto(): EnumValueDescriptorProto { + return { name: "", number: 0, options: undefined }; +} + +export const EnumValueDescriptorProto = { + encode(message: EnumValueDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(16).int32(message.number); + } + if (message.options !== undefined) { + EnumValueOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.number = reader.int32(); + break; + case 3: + message.options = EnumValueOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + options: isSet(object.options) ? EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: EnumValueDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.options !== undefined + && (obj.options = message.options ? 
EnumValueOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? EnumValueOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseServiceDescriptorProto(): ServiceDescriptorProto { + return { name: "", method: [], options: undefined }; +} + +export const ServiceDescriptorProto = { + encode(message: ServiceDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.method) { + MethodDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + ServiceOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.method.push(MethodDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = ServiceOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + method: Array.isArray(object?.method) ? object.method.map((e: any) => MethodDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? 
ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: ServiceDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.method) { + obj.method = message.method.map((e) => e ? MethodDescriptorProto.toJSON(e) : undefined); + } else { + obj.method = []; + } + message.options !== undefined + && (obj.options = message.options ? ServiceOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto(); + message.name = object.name ?? ""; + message.method = object.method?.map((e) => MethodDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? ServiceOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseMethodDescriptorProto(): MethodDescriptorProto { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} + +export const MethodDescriptorProto = { + encode(message: MethodDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.inputType !== "") { + writer.uint32(18).string(message.inputType); + } + if (message.outputType !== "") { + writer.uint32(26).string(message.outputType); + } + if (message.options !== undefined) { + MethodOptions.encode(message.options, writer.uint32(34).fork()).ldelim(); + } + if (message.clientStreaming === true) { + writer.uint32(40).bool(message.clientStreaming); + } + if (message.serverStreaming === true) { + writer.uint32(48).bool(message.serverStreaming); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodDescriptorProto { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.inputType = reader.string(); + break; + case 3: + message.outputType = reader.string(); + break; + case 4: + message.options = MethodOptions.decode(reader, reader.uint32()); + break; + case 5: + message.clientStreaming = reader.bool(); + break; + case 6: + message.serverStreaming = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + inputType: isSet(object.inputType) ? String(object.inputType) : "", + outputType: isSet(object.outputType) ? String(object.outputType) : "", + options: isSet(object.options) ? MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false, + serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false, + }; + }, + + toJSON(message: MethodDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.inputType !== undefined && (obj.inputType = message.inputType); + message.outputType !== undefined && (obj.outputType = message.outputType); + message.options !== undefined + && (obj.options = message.options ? MethodOptions.toJSON(message.options) : undefined); + message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming); + message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming); + return obj; + }, + + fromPartial, I>>(object: I): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto(); + message.name = object.name ?? 
""; + message.inputType = object.inputType ?? ""; + message.outputType = object.outputType ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? MethodOptions.fromPartial(object.options) + : undefined; + message.clientStreaming = object.clientStreaming ?? false; + message.serverStreaming = object.serverStreaming ?? false; + return message; + }, +}; + +function createBaseFileOptions(): FileOptions { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + uninterpretedOption: [], + }; +} + +export const FileOptions = { + encode(message: FileOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.javaPackage !== "") { + writer.uint32(10).string(message.javaPackage); + } + if (message.javaOuterClassname !== "") { + writer.uint32(66).string(message.javaOuterClassname); + } + if (message.javaMultipleFiles === true) { + writer.uint32(80).bool(message.javaMultipleFiles); + } + if (message.javaGenerateEqualsAndHash === true) { + writer.uint32(160).bool(message.javaGenerateEqualsAndHash); + } + if (message.javaStringCheckUtf8 === true) { + writer.uint32(216).bool(message.javaStringCheckUtf8); + } + if (message.optimizeFor !== 1) { + writer.uint32(72).int32(message.optimizeFor); + } + if (message.goPackage !== "") { + writer.uint32(90).string(message.goPackage); + } + if (message.ccGenericServices === true) { + writer.uint32(128).bool(message.ccGenericServices); + } + if (message.javaGenericServices === true) { + writer.uint32(136).bool(message.javaGenericServices); + } + if 
(message.pyGenericServices === true) { + writer.uint32(144).bool(message.pyGenericServices); + } + if (message.phpGenericServices === true) { + writer.uint32(336).bool(message.phpGenericServices); + } + if (message.deprecated === true) { + writer.uint32(184).bool(message.deprecated); + } + if (message.ccEnableArenas === true) { + writer.uint32(248).bool(message.ccEnableArenas); + } + if (message.objcClassPrefix !== "") { + writer.uint32(290).string(message.objcClassPrefix); + } + if (message.csharpNamespace !== "") { + writer.uint32(298).string(message.csharpNamespace); + } + if (message.swiftPrefix !== "") { + writer.uint32(314).string(message.swiftPrefix); + } + if (message.phpClassPrefix !== "") { + writer.uint32(322).string(message.phpClassPrefix); + } + if (message.phpNamespace !== "") { + writer.uint32(330).string(message.phpNamespace); + } + if (message.phpMetadataNamespace !== "") { + writer.uint32(354).string(message.phpMetadataNamespace); + } + if (message.rubyPackage !== "") { + writer.uint32(362).string(message.rubyPackage); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.javaPackage = reader.string(); + break; + case 8: + message.javaOuterClassname = reader.string(); + break; + case 10: + message.javaMultipleFiles = reader.bool(); + break; + case 20: + message.javaGenerateEqualsAndHash = reader.bool(); + break; + case 27: + message.javaStringCheckUtf8 = reader.bool(); + break; + case 9: + message.optimizeFor = reader.int32() as any; + break; + case 11: + message.goPackage = reader.string(); + break; + case 16: + message.ccGenericServices = reader.bool(); + break; + case 17: + message.javaGenericServices = reader.bool(); + break; + case 18: + message.pyGenericServices = reader.bool(); + break; + case 42: + message.phpGenericServices = reader.bool(); + break; + case 23: + message.deprecated = reader.bool(); + break; + case 31: + message.ccEnableArenas = reader.bool(); + break; + case 36: + message.objcClassPrefix = reader.string(); + break; + case 37: + message.csharpNamespace = reader.string(); + break; + case 39: + message.swiftPrefix = reader.string(); + break; + case 40: + message.phpClassPrefix = reader.string(); + break; + case 41: + message.phpNamespace = reader.string(); + break; + case 44: + message.phpMetadataNamespace = reader.string(); + break; + case 45: + message.rubyPackage = reader.string(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileOptions { + return { + javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "", + javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) ? 
Boolean(object.javaMultipleFiles) : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false, + optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, + goPackage: isSet(object.goPackage) ? String(object.goPackage) : "", + ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false, + javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false, + pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false, + phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false, + objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "", + csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "", + swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "", + phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "", + phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "", + rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "", + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FileOptions): unknown { + const obj: any = {}; + message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage); + message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname); + message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles); + message.javaGenerateEqualsAndHash !== undefined + && (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash); + message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8); + message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor)); + message.goPackage !== undefined && (obj.goPackage = message.goPackage); + message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices); + message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices); + message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices); + message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas); + message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix); + message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace); + message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix); + message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix); + message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace); + message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace); + message.rubyPackage !== undefined 
&& (obj.rubyPackage = message.rubyPackage); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileOptions { + const message = createBaseFileOptions(); + message.javaPackage = object.javaPackage ?? ""; + message.javaOuterClassname = object.javaOuterClassname ?? ""; + message.javaMultipleFiles = object.javaMultipleFiles ?? false; + message.javaGenerateEqualsAndHash = object.javaGenerateEqualsAndHash ?? false; + message.javaStringCheckUtf8 = object.javaStringCheckUtf8 ?? false; + message.optimizeFor = object.optimizeFor ?? 1; + message.goPackage = object.goPackage ?? ""; + message.ccGenericServices = object.ccGenericServices ?? false; + message.javaGenericServices = object.javaGenericServices ?? false; + message.pyGenericServices = object.pyGenericServices ?? false; + message.phpGenericServices = object.phpGenericServices ?? false; + message.deprecated = object.deprecated ?? false; + message.ccEnableArenas = object.ccEnableArenas ?? false; + message.objcClassPrefix = object.objcClassPrefix ?? ""; + message.csharpNamespace = object.csharpNamespace ?? ""; + message.swiftPrefix = object.swiftPrefix ?? ""; + message.phpClassPrefix = object.phpClassPrefix ?? ""; + message.phpNamespace = object.phpNamespace ?? ""; + message.phpMetadataNamespace = object.phpMetadataNamespace ?? ""; + message.rubyPackage = object.rubyPackage ?? 
""; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMessageOptions(): MessageOptions { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + uninterpretedOption: [], + }; +} + +export const MessageOptions = { + encode(message: MessageOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.messageSetWireFormat === true) { + writer.uint32(8).bool(message.messageSetWireFormat); + } + if (message.noStandardDescriptorAccessor === true) { + writer.uint32(16).bool(message.noStandardDescriptorAccessor); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.mapEntry === true) { + writer.uint32(56).bool(message.mapEntry); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MessageOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMessageOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.messageSetWireFormat = reader.bool(); + break; + case 2: + message.noStandardDescriptorAccessor = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 7: + message.mapEntry = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MessageOptions { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) ? 
Boolean(object.messageSetWireFormat) : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MessageOptions): unknown { + const obj: any = {}; + message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat); + message.noStandardDescriptorAccessor !== undefined + && (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MessageOptions { + const message = createBaseMessageOptions(); + message.messageSetWireFormat = object.messageSetWireFormat ?? false; + message.noStandardDescriptorAccessor = object.noStandardDescriptorAccessor ?? false; + message.deprecated = object.deprecated ?? false; + message.mapEntry = object.mapEntry ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldOptions(): FieldOptions { + return { ctype: 0, packed: false, jstype: 0, lazy: false, deprecated: false, weak: false, uninterpretedOption: [] }; +} + +export const FieldOptions = { + encode(message: FieldOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ctype !== 0) { + writer.uint32(8).int32(message.ctype); + } + if (message.packed === true) { + writer.uint32(16).bool(message.packed); + } + if (message.jstype !== 0) { + writer.uint32(48).int32(message.jstype); + } + if (message.lazy === true) { + writer.uint32(40).bool(message.lazy); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.weak === true) { + writer.uint32(80).bool(message.weak); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.ctype = reader.int32() as any; + break; + case 2: + message.packed = reader.bool(); + break; + case 6: + message.jstype = reader.int32() as any; + break; + case 5: + message.lazy = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 10: + message.weak = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldOptions { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? Boolean(object.lazy) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? Boolean(object.weak) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FieldOptions): unknown { + const obj: any = {}; + message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype)); + message.packed !== undefined && (obj.packed = message.packed); + message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype)); + message.lazy !== undefined && (obj.lazy = message.lazy); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.weak !== undefined && (obj.weak = message.weak); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FieldOptions { + const message = createBaseFieldOptions(); + message.ctype = object.ctype ?? 0; + message.packed = object.packed ?? false; + message.jstype = object.jstype ?? 0; + message.lazy = object.lazy ?? false; + message.deprecated = object.deprecated ?? false; + message.weak = object.weak ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseOneofOptions(): OneofOptions { + return { uninterpretedOption: [] }; +} + +export const OneofOptions = { + encode(message: OneofOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: OneofOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): OneofOptions { + const message = createBaseOneofOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumOptions(): EnumOptions { + return { allowAlias: false, deprecated: false, uninterpretedOption: [] }; +} + +export const EnumOptions = { + encode(message: EnumOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.allowAlias === true) { + writer.uint32(16).bool(message.allowAlias); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.allowAlias = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumOptions { + return { + allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumOptions): unknown { + const obj: any = {}; + message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumOptions { + const message = createBaseEnumOptions(); + message.allowAlias = object.allowAlias ?? false; + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumValueOptions(): EnumValueOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const EnumValueOptions = { + encode(message: EnumValueOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(8).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumValueOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumValueOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumValueOptions { + const message = createBaseEnumValueOptions(); + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseServiceOptions(): ServiceOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const ServiceOptions = { + encode(message: ServiceOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceOptions { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ServiceOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ServiceOptions { + const message = createBaseServiceOptions(); + message.deprecated = object.deprecated ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMethodOptions(): MethodOptions { + return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] }; +} + +export const MethodOptions = { + encode(message: MethodOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + if (message.idempotencyLevel !== 0) { + writer.uint32(272).int32(message.idempotencyLevel); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 34: + message.idempotencyLevel = reader.int32() as any; + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : 0, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MethodOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.idempotencyLevel !== undefined + && (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel)); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MethodOptions { + const message = createBaseMethodOptions(); + message.deprecated = object.deprecated ?? false; + message.idempotencyLevel = object.idempotencyLevel ?? 0; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseUninterpretedOption(): UninterpretedOption { + return { + name: [], + identifierValue: "", + positiveIntValue: 0, + negativeIntValue: 0, + doubleValue: 0, + stringValue: new Uint8Array(), + aggregateValue: "", + }; +} + +export const UninterpretedOption = { + encode(message: UninterpretedOption, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.name) { + UninterpretedOption_NamePart.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.identifierValue !== "") { + writer.uint32(26).string(message.identifierValue); + } + if (message.positiveIntValue !== 0) { + writer.uint32(32).uint64(message.positiveIntValue); + } + if (message.negativeIntValue !== 0) { + writer.uint32(40).int64(message.negativeIntValue); + } + if (message.doubleValue !== 0) { + writer.uint32(49).double(message.doubleValue); + } + if (message.stringValue.length !== 0) { + writer.uint32(58).bytes(message.stringValue); + } + if (message.aggregateValue !== "") { + writer.uint32(66).string(message.aggregateValue); + } + 
return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.name.push(UninterpretedOption_NamePart.decode(reader, reader.uint32())); + break; + case 3: + message.identifierValue = reader.string(); + break; + case 4: + message.positiveIntValue = longToNumber(reader.uint64() as Long); + break; + case 5: + message.negativeIntValue = longToNumber(reader.int64() as Long); + break; + case 6: + message.doubleValue = reader.double(); + break; + case 7: + message.stringValue = reader.bytes(); + break; + case 8: + message.aggregateValue = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption { + return { + name: Array.isArray(object?.name) ? object.name.map((e: any) => UninterpretedOption_NamePart.fromJSON(e)) : [], + identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "", + positiveIntValue: isSet(object.positiveIntValue) ? Number(object.positiveIntValue) : 0, + negativeIntValue: isSet(object.negativeIntValue) ? Number(object.negativeIntValue) : 0, + doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) ? bytesFromBase64(object.stringValue) : new Uint8Array(), + aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "", + }; + }, + + toJSON(message: UninterpretedOption): unknown { + const obj: any = {}; + if (message.name) { + obj.name = message.name.map((e) => e ? 
UninterpretedOption_NamePart.toJSON(e) : undefined); + } else { + obj.name = []; + } + message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue); + message.positiveIntValue !== undefined && (obj.positiveIntValue = Math.round(message.positiveIntValue)); + message.negativeIntValue !== undefined && (obj.negativeIntValue = Math.round(message.negativeIntValue)); + message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue); + message.stringValue !== undefined + && (obj.stringValue = base64FromBytes( + message.stringValue !== undefined ? message.stringValue : new Uint8Array(), + )); + message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption { + const message = createBaseUninterpretedOption(); + message.name = object.name?.map((e) => UninterpretedOption_NamePart.fromPartial(e)) || []; + message.identifierValue = object.identifierValue ?? ""; + message.positiveIntValue = object.positiveIntValue ?? 0; + message.negativeIntValue = object.negativeIntValue ?? 0; + message.doubleValue = object.doubleValue ?? 0; + message.stringValue = object.stringValue ?? new Uint8Array(); + message.aggregateValue = object.aggregateValue ?? ""; + return message; + }, +}; + +function createBaseUninterpretedOption_NamePart(): UninterpretedOption_NamePart { + return { namePart: "", isExtension: false }; +} + +export const UninterpretedOption_NamePart = { + encode(message: UninterpretedOption_NamePart, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.namePart !== "") { + writer.uint32(10).string(message.namePart); + } + if (message.isExtension === true) { + writer.uint32(16).bool(message.isExtension); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption_NamePart { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseUninterpretedOption_NamePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.namePart = reader.string(); + break; + case 2: + message.isExtension = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption_NamePart { + return { + namePart: isSet(object.namePart) ? String(object.namePart) : "", + isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false, + }; + }, + + toJSON(message: UninterpretedOption_NamePart): unknown { + const obj: any = {}; + message.namePart !== undefined && (obj.namePart = message.namePart); + message.isExtension !== undefined && (obj.isExtension = message.isExtension); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart(); + message.namePart = object.namePart ?? ""; + message.isExtension = object.isExtension ?? false; + return message; + }, +}; + +function createBaseSourceCodeInfo(): SourceCodeInfo { + return { location: [] }; +} + +export const SourceCodeInfo = { + encode(message: SourceCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.location) { + SourceCodeInfo_Location.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.location.push(SourceCodeInfo_Location.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo { + return { + location: Array.isArray(object?.location) + ? object.location.map((e: any) => SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo): unknown { + const obj: any = {}; + if (message.location) { + obj.location = message.location.map((e) => e ? SourceCodeInfo_Location.toJSON(e) : undefined); + } else { + obj.location = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo { + const message = createBaseSourceCodeInfo(); + message.location = object.location?.map((e) => SourceCodeInfo_Location.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSourceCodeInfo_Location(): SourceCodeInfo_Location { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} + +export const SourceCodeInfo_Location = { + encode(message: SourceCodeInfo_Location, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(18).fork(); + for (const v of message.span) { + writer.int32(v); + } + writer.ldelim(); + if (message.leadingComments !== "") { + writer.uint32(26).string(message.leadingComments); + } + if (message.trailingComments !== "") { + writer.uint32(34).string(message.trailingComments); + } + for (const v of message.leadingDetachedComments) { + writer.uint32(50).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo_Location { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo_Location(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.span.push(reader.int32()); + } + } else { + message.span.push(reader.int32()); + } + break; + case 3: + message.leadingComments = reader.string(); + break; + case 4: + message.trailingComments = reader.string(); + break; + case 6: + message.leadingDetachedComments.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo_Location { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + span: Array.isArray(object?.span) ? object.span.map((e: any) => Number(e)) : [], + leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "", + trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "", + leadingDetachedComments: Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e: any) => String(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo_Location): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + if (message.span) { + obj.span = message.span.map((e) => Math.round(e)); + } else { + obj.span = []; + } + message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments); + message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments); + if (message.leadingDetachedComments) { + obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e); + } else { + obj.leadingDetachedComments = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location(); + message.path = object.path?.map((e) => e) || []; + message.span = object.span?.map((e) => e) || []; + message.leadingComments = object.leadingComments ?? ""; + message.trailingComments = object.trailingComments ?? ""; + message.leadingDetachedComments = object.leadingDetachedComments?.map((e) => e) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo(): GeneratedCodeInfo { + return { annotation: [] }; +} + +export const GeneratedCodeInfo = { + encode(message: GeneratedCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.annotation) { + GeneratedCodeInfo_Annotation.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.annotation.push(GeneratedCodeInfo_Annotation.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo { + return { + annotation: Array.isArray(object?.annotation) + ? object.annotation.map((e: any) => GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GeneratedCodeInfo): unknown { + const obj: any = {}; + if (message.annotation) { + obj.annotation = message.annotation.map((e) => e ? GeneratedCodeInfo_Annotation.toJSON(e) : undefined); + } else { + obj.annotation = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo(); + message.annotation = object.annotation?.map((e) => GeneratedCodeInfo_Annotation.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo_Annotation(): GeneratedCodeInfo_Annotation { + return { path: [], sourceFile: "", begin: 0, end: 0 }; +} + +export const GeneratedCodeInfo_Annotation = { + encode(message: GeneratedCodeInfo_Annotation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + if (message.sourceFile !== "") { + writer.uint32(18).string(message.sourceFile); + } + if (message.begin !== 0) { + writer.uint32(24).int32(message.begin); + } + if (message.end !== 0) { + writer.uint32(32).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo_Annotation { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo_Annotation(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + message.sourceFile = reader.string(); + break; + case 3: + message.begin = reader.int32(); + break; + case 4: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo_Annotation { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "", + begin: isSet(object.begin) ? Number(object.begin) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + }; + }, + + toJSON(message: GeneratedCodeInfo_Annotation): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile); + message.begin !== undefined && (obj.begin = Math.round(message.begin)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation(); + message.path = object.path?.map((e) => e) || []; + message.sourceFile = object.sourceFile ?? ""; + message.begin = object.begin ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/approved_certificates.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/approved_certificates.ts new file mode 100644 index 000000000..b4f1054dc --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/approved_certificates.ts @@ -0,0 +1,107 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Certificate } from "./certificate"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; + +export interface ApprovedCertificates { + subject: string; + subjectKeyId: string; + certs: Certificate[]; + schemaVersion: number; +} + +function createBaseApprovedCertificates(): ApprovedCertificates { + return { subject: "", subjectKeyId: "", certs: [], schemaVersion: 0 }; +} + +export const ApprovedCertificates = { + encode(message: ApprovedCertificates, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subject !== "") { + writer.uint32(10).string(message.subject); + } + if (message.subjectKeyId !== "") { + writer.uint32(18).string(message.subjectKeyId); + } + for (const v of message.certs) { + Certificate.encode(v!, writer.uint32(26).fork()).ldelim(); + } + if (message.schemaVersion !== 0) { + writer.uint32(32).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: 
number): ApprovedCertificates { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseApprovedCertificates(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.subject = reader.string(); + break; + case 2: + message.subjectKeyId = reader.string(); + break; + case 3: + message.certs.push(Certificate.decode(reader, reader.uint32())); + break; + case 4: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ApprovedCertificates { + return { + subject: isSet(object.subject) ? String(object.subject) : "", + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + certs: Array.isArray(object?.certs) ? object.certs.map((e: any) => Certificate.fromJSON(e)) : [], + schemaVersion: isSet(object.schemaVersion) ? Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: ApprovedCertificates): unknown { + const obj: any = {}; + message.subject !== undefined && (obj.subject = message.subject); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + if (message.certs) { + obj.certs = message.certs.map((e) => e ? Certificate.toJSON(e) : undefined); + } else { + obj.certs = []; + } + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): ApprovedCertificates { + const message = createBaseApprovedCertificates(); + message.subject = object.subject ?? ""; + message.subjectKeyId = object.subjectKeyId ?? ""; + message.certs = object.certs?.map((e) => Certificate.fromPartial(e)) || []; + message.schemaVersion = object.schemaVersion ?? 
0; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/approved_certificates_by_subject.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/approved_certificates_by_subject.ts new file mode 100644 index 000000000..f60e1d433 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/approved_certificates_by_subject.ts @@ -0,0 +1,88 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; + +export interface ApprovedCertificatesBySubject { + subject: string; + subjectKeyIds: string[]; +} + +function createBaseApprovedCertificatesBySubject(): ApprovedCertificatesBySubject { + return { subject: "", subjectKeyIds: [] }; +} + +export const ApprovedCertificatesBySubject = { + encode(message: ApprovedCertificatesBySubject, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subject !== "") { + writer.uint32(10).string(message.subject); + } + for (const v of message.subjectKeyIds) { + writer.uint32(18).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ApprovedCertificatesBySubject { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseApprovedCertificatesBySubject(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.subject = reader.string(); + break; + case 2: + message.subjectKeyIds.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ApprovedCertificatesBySubject { + return { + subject: isSet(object.subject) ? String(object.subject) : "", + subjectKeyIds: Array.isArray(object?.subjectKeyIds) ? object.subjectKeyIds.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: ApprovedCertificatesBySubject): unknown { + const obj: any = {}; + message.subject !== undefined && (obj.subject = message.subject); + if (message.subjectKeyIds) { + obj.subjectKeyIds = message.subjectKeyIds.map((e) => e); + } else { + obj.subjectKeyIds = []; + } + return obj; + }, + + fromPartial, I>>( + object: I, + ): ApprovedCertificatesBySubject { + const message = createBaseApprovedCertificatesBySubject(); + message.subject = object.subject ?? ""; + message.subjectKeyIds = object.subjectKeyIds?.map((e) => e) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/approved_certificates_by_subject_key_id.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/approved_certificates_by_subject_key_id.ts new file mode 100644 index 000000000..58cbe38a5 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/approved_certificates_by_subject_key_id.ts @@ -0,0 +1,89 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Certificate } from "./certificate"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; + +export interface ApprovedCertificatesBySubjectKeyId { + subjectKeyId: string; + certs: Certificate[]; +} + +function createBaseApprovedCertificatesBySubjectKeyId(): ApprovedCertificatesBySubjectKeyId { + return { subjectKeyId: "", certs: [] }; +} + +export const ApprovedCertificatesBySubjectKeyId = { + encode(message: ApprovedCertificatesBySubjectKeyId, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subjectKeyId !== "") { + writer.uint32(10).string(message.subjectKeyId); + } + for (const v of message.certs) { + Certificate.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ApprovedCertificatesBySubjectKeyId { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseApprovedCertificatesBySubjectKeyId(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.subjectKeyId = reader.string(); + break; + case 2: + message.certs.push(Certificate.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ApprovedCertificatesBySubjectKeyId { + return { + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + certs: Array.isArray(object?.certs) ? object.certs.map((e: any) => Certificate.fromJSON(e)) : [], + }; + }, + + toJSON(message: ApprovedCertificatesBySubjectKeyId): unknown { + const obj: any = {}; + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + if (message.certs) { + obj.certs = message.certs.map((e) => e ? Certificate.toJSON(e) : undefined); + } else { + obj.certs = []; + } + return obj; + }, + + fromPartial, I>>( + object: I, + ): ApprovedCertificatesBySubjectKeyId { + const message = createBaseApprovedCertificatesBySubjectKeyId(); + message.subjectKeyId = object.subjectKeyId ?? ""; + message.certs = object.certs?.map((e) => Certificate.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/approved_root_certificates.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/approved_root_certificates.ts new file mode 100644 index 000000000..dd756f615 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/approved_root_certificates.ts @@ -0,0 +1,73 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { CertificateIdentifier } from "./certificate_identifier"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; + +export interface ApprovedRootCertificates { + certs: CertificateIdentifier[]; +} + +function createBaseApprovedRootCertificates(): ApprovedRootCertificates { + return { certs: [] }; +} + +export const ApprovedRootCertificates = { + encode(message: ApprovedRootCertificates, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.certs) { + CertificateIdentifier.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ApprovedRootCertificates { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseApprovedRootCertificates(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.certs.push(CertificateIdentifier.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ApprovedRootCertificates { + return { + certs: Array.isArray(object?.certs) ? object.certs.map((e: any) => CertificateIdentifier.fromJSON(e)) : [], + }; + }, + + toJSON(message: ApprovedRootCertificates): unknown { + const obj: any = {}; + if (message.certs) { + obj.certs = message.certs.map((e) => e ? CertificateIdentifier.toJSON(e) : undefined); + } else { + obj.certs = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ApprovedRootCertificates { + const message = createBaseApprovedRootCertificates(); + message.certs = object.certs?.map((e) => CertificateIdentifier.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/certificate.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/certificate.ts new file mode 100644 index 000000000..edf0dec2a --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/certificate.ts @@ -0,0 +1,248 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Grant } from "./grant"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; + +export interface Certificate { + pemCert: string; + serialNumber: string; + issuer: string; + authorityKeyId: string; + rootSubject: string; + rootSubjectKeyId: string; + isRoot: boolean; + owner: string; + subject: string; + subjectKeyId: string; + approvals: Grant[]; + subjectAsText: string; + rejects: Grant[]; + vid: number; + isNoc: boolean; + schemaVersion: number; +} + +function createBaseCertificate(): Certificate { + return { + pemCert: "", + serialNumber: "", + issuer: "", + authorityKeyId: "", + rootSubject: "", + rootSubjectKeyId: "", + isRoot: false, + owner: "", + subject: "", + subjectKeyId: "", + approvals: [], + subjectAsText: "", + rejects: [], + vid: 0, + isNoc: false, + schemaVersion: 0, + }; +} + +export const Certificate = { + encode(message: Certificate, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pemCert !== "") { + writer.uint32(10).string(message.pemCert); + } + if (message.serialNumber !== "") { + writer.uint32(18).string(message.serialNumber); + } + if (message.issuer !== "") { + writer.uint32(26).string(message.issuer); + } + if (message.authorityKeyId !== "") { + writer.uint32(34).string(message.authorityKeyId); + } + if (message.rootSubject !== "") { + writer.uint32(42).string(message.rootSubject); + 
} + if (message.rootSubjectKeyId !== "") { + writer.uint32(50).string(message.rootSubjectKeyId); + } + if (message.isRoot === true) { + writer.uint32(56).bool(message.isRoot); + } + if (message.owner !== "") { + writer.uint32(66).string(message.owner); + } + if (message.subject !== "") { + writer.uint32(74).string(message.subject); + } + if (message.subjectKeyId !== "") { + writer.uint32(82).string(message.subjectKeyId); + } + for (const v of message.approvals) { + Grant.encode(v!, writer.uint32(90).fork()).ldelim(); + } + if (message.subjectAsText !== "") { + writer.uint32(98).string(message.subjectAsText); + } + for (const v of message.rejects) { + Grant.encode(v!, writer.uint32(106).fork()).ldelim(); + } + if (message.vid !== 0) { + writer.uint32(112).int32(message.vid); + } + if (message.isNoc === true) { + writer.uint32(120).bool(message.isNoc); + } + if (message.schemaVersion !== 0) { + writer.uint32(128).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Certificate { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseCertificate(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pemCert = reader.string(); + break; + case 2: + message.serialNumber = reader.string(); + break; + case 3: + message.issuer = reader.string(); + break; + case 4: + message.authorityKeyId = reader.string(); + break; + case 5: + message.rootSubject = reader.string(); + break; + case 6: + message.rootSubjectKeyId = reader.string(); + break; + case 7: + message.isRoot = reader.bool(); + break; + case 8: + message.owner = reader.string(); + break; + case 9: + message.subject = reader.string(); + break; + case 10: + message.subjectKeyId = reader.string(); + break; + case 11: + message.approvals.push(Grant.decode(reader, reader.uint32())); + break; + case 12: + message.subjectAsText = reader.string(); + break; + case 13: + message.rejects.push(Grant.decode(reader, reader.uint32())); + break; + case 14: + message.vid = reader.int32(); + break; + case 15: + message.isNoc = reader.bool(); + break; + case 16: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Certificate { + return { + pemCert: isSet(object.pemCert) ? String(object.pemCert) : "", + serialNumber: isSet(object.serialNumber) ? String(object.serialNumber) : "", + issuer: isSet(object.issuer) ? String(object.issuer) : "", + authorityKeyId: isSet(object.authorityKeyId) ? String(object.authorityKeyId) : "", + rootSubject: isSet(object.rootSubject) ? String(object.rootSubject) : "", + rootSubjectKeyId: isSet(object.rootSubjectKeyId) ? String(object.rootSubjectKeyId) : "", + isRoot: isSet(object.isRoot) ? Boolean(object.isRoot) : false, + owner: isSet(object.owner) ? String(object.owner) : "", + subject: isSet(object.subject) ? String(object.subject) : "", + subjectKeyId: isSet(object.subjectKeyId) ? 
String(object.subjectKeyId) : "", + approvals: Array.isArray(object?.approvals) ? object.approvals.map((e: any) => Grant.fromJSON(e)) : [], + subjectAsText: isSet(object.subjectAsText) ? String(object.subjectAsText) : "", + rejects: Array.isArray(object?.rejects) ? object.rejects.map((e: any) => Grant.fromJSON(e)) : [], + vid: isSet(object.vid) ? Number(object.vid) : 0, + isNoc: isSet(object.isNoc) ? Boolean(object.isNoc) : false, + schemaVersion: isSet(object.schemaVersion) ? Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: Certificate): unknown { + const obj: any = {}; + message.pemCert !== undefined && (obj.pemCert = message.pemCert); + message.serialNumber !== undefined && (obj.serialNumber = message.serialNumber); + message.issuer !== undefined && (obj.issuer = message.issuer); + message.authorityKeyId !== undefined && (obj.authorityKeyId = message.authorityKeyId); + message.rootSubject !== undefined && (obj.rootSubject = message.rootSubject); + message.rootSubjectKeyId !== undefined && (obj.rootSubjectKeyId = message.rootSubjectKeyId); + message.isRoot !== undefined && (obj.isRoot = message.isRoot); + message.owner !== undefined && (obj.owner = message.owner); + message.subject !== undefined && (obj.subject = message.subject); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + if (message.approvals) { + obj.approvals = message.approvals.map((e) => e ? Grant.toJSON(e) : undefined); + } else { + obj.approvals = []; + } + message.subjectAsText !== undefined && (obj.subjectAsText = message.subjectAsText); + if (message.rejects) { + obj.rejects = message.rejects.map((e) => e ? 
Grant.toJSON(e) : undefined); + } else { + obj.rejects = []; + } + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.isNoc !== undefined && (obj.isNoc = message.isNoc); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): Certificate { + const message = createBaseCertificate(); + message.pemCert = object.pemCert ?? ""; + message.serialNumber = object.serialNumber ?? ""; + message.issuer = object.issuer ?? ""; + message.authorityKeyId = object.authorityKeyId ?? ""; + message.rootSubject = object.rootSubject ?? ""; + message.rootSubjectKeyId = object.rootSubjectKeyId ?? ""; + message.isRoot = object.isRoot ?? false; + message.owner = object.owner ?? ""; + message.subject = object.subject ?? ""; + message.subjectKeyId = object.subjectKeyId ?? ""; + message.approvals = object.approvals?.map((e) => Grant.fromPartial(e)) || []; + message.subjectAsText = object.subjectAsText ?? ""; + message.rejects = object.rejects?.map((e) => Grant.fromPartial(e)) || []; + message.vid = object.vid ?? 0; + message.isNoc = object.isNoc ?? false; + message.schemaVersion = object.schemaVersion ?? 0; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/certificate_identifier.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/certificate_identifier.ts new file mode 100644 index 000000000..c0cf5d12b --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/certificate_identifier.ts @@ -0,0 +1,82 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; + +export interface CertificateIdentifier { + subject: string; + subjectKeyId: string; +} + +function createBaseCertificateIdentifier(): CertificateIdentifier { + return { subject: "", subjectKeyId: "" }; +} + +export const CertificateIdentifier = { + encode(message: CertificateIdentifier, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subject !== "") { + writer.uint32(10).string(message.subject); + } + if (message.subjectKeyId !== "") { + writer.uint32(18).string(message.subjectKeyId); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CertificateIdentifier { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseCertificateIdentifier(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.subject = reader.string(); + break; + case 2: + message.subjectKeyId = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): CertificateIdentifier { + return { + subject: isSet(object.subject) ? String(object.subject) : "", + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + }; + }, + + toJSON(message: CertificateIdentifier): unknown { + const obj: any = {}; + message.subject !== undefined && (obj.subject = message.subject); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + return obj; + }, + + fromPartial, I>>(object: I): CertificateIdentifier { + const message = createBaseCertificateIdentifier(); + message.subject = object.subject ?? ""; + message.subjectKeyId = object.subjectKeyId ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/child_certificates.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/child_certificates.ts new file mode 100644 index 000000000..71cac0663 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/child_certificates.ts @@ -0,0 +1,97 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { CertificateIdentifier } from "./certificate_identifier"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; + +export interface ChildCertificates { + issuer: string; + authorityKeyId: string; + certIds: CertificateIdentifier[]; +} + +function createBaseChildCertificates(): ChildCertificates { + return { issuer: "", authorityKeyId: "", certIds: [] }; +} + +export const ChildCertificates = { + encode(message: ChildCertificates, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.issuer !== "") { + writer.uint32(10).string(message.issuer); + } + if (message.authorityKeyId !== "") { + writer.uint32(18).string(message.authorityKeyId); + } + for (const v of message.certIds) { + CertificateIdentifier.encode(v!, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ChildCertificates { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseChildCertificates(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.issuer = reader.string(); + break; + case 2: + message.authorityKeyId = reader.string(); + break; + case 3: + message.certIds.push(CertificateIdentifier.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ChildCertificates { + return { + issuer: isSet(object.issuer) ? String(object.issuer) : "", + authorityKeyId: isSet(object.authorityKeyId) ? String(object.authorityKeyId) : "", + certIds: Array.isArray(object?.certIds) ? object.certIds.map((e: any) => CertificateIdentifier.fromJSON(e)) : [], + }; + }, + + toJSON(message: ChildCertificates): unknown { + const obj: any = {}; + message.issuer !== undefined && (obj.issuer = message.issuer); + message.authorityKeyId !== undefined && (obj.authorityKeyId = message.authorityKeyId); + if (message.certIds) { + obj.certIds = message.certIds.map((e) => e ? CertificateIdentifier.toJSON(e) : undefined); + } else { + obj.certIds = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ChildCertificates { + const message = createBaseChildCertificates(); + message.issuer = object.issuer ?? ""; + message.authorityKeyId = object.authorityKeyId ?? ""; + message.certIds = object.certIds?.map((e) => CertificateIdentifier.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/genesis.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/genesis.ts new file mode 100644 index 000000000..279824653 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/genesis.ts @@ -0,0 +1,426 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { ApprovedCertificates } from "./approved_certificates"; +import { ApprovedCertificatesBySubject } from "./approved_certificates_by_subject"; +import { ApprovedCertificatesBySubjectKeyId } from "./approved_certificates_by_subject_key_id"; +import { ApprovedRootCertificates } from "./approved_root_certificates"; +import { ChildCertificates } from "./child_certificates"; +import { NocIcaCertificates } from "./noc_ica_certificates"; +import { NocRootCertificates } from "./noc_root_certificates"; +import { NocRootCertificatesByVidAndSkid } from "./noc_root_certificates_by_vid_and_skid"; +import { PkiRevocationDistributionPoint } from "./pki_revocation_distribution_point"; +import { PkiRevocationDistributionPointsByIssuerSubjectKeyID } from "./pki_revocation_distribution_points_by_issuer_subject_key_id"; +import { ProposedCertificate } from "./proposed_certificate"; +import { ProposedCertificateRevocation } from "./proposed_certificate_revocation"; +import { RejectedCertificate } from "./rejected_certificate"; +import { RevokedCertificates } from "./revoked_certificates"; +import { RevokedNocRootCertificates } from "./revoked_noc_root_certificates"; +import { RevokedRootCertificates } from "./revoked_root_certificates"; +import { UniqueCertificate } from "./unique_certificate"; + +export 
const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; + +/** GenesisState defines the pki module's genesis state. */ +export interface GenesisState { + approvedCertificatesList: ApprovedCertificates[]; + proposedCertificateList: ProposedCertificate[]; + childCertificatesList: ChildCertificates[]; + proposedCertificateRevocationList: ProposedCertificateRevocation[]; + revokedCertificatesList: RevokedCertificates[]; + uniqueCertificateList: UniqueCertificate[]; + approvedRootCertificates: ApprovedRootCertificates | undefined; + revokedRootCertificates: RevokedRootCertificates | undefined; + approvedCertificatesBySubjectList: ApprovedCertificatesBySubject[]; + rejectedCertificateList: RejectedCertificate[]; + PkiRevocationDistributionPointList: PkiRevocationDistributionPoint[]; + PkiRevocationDistributionPointsByIssuerSubjectKeyIDList: PkiRevocationDistributionPointsByIssuerSubjectKeyID[]; + approvedCertificatesBySubjectKeyIdList: ApprovedCertificatesBySubjectKeyId[]; + nocRootCertificatesList: NocRootCertificates[]; + nocIcaCertificatesList: NocIcaCertificates[]; + revokedNocRootCertificatesList: RevokedNocRootCertificates[]; + /** this line is used by starport scaffolding # genesis/proto/state */ + nocRootCertificatesByVidAndSkidList: NocRootCertificatesByVidAndSkid[]; +} + +function createBaseGenesisState(): GenesisState { + return { + approvedCertificatesList: [], + proposedCertificateList: [], + childCertificatesList: [], + proposedCertificateRevocationList: [], + revokedCertificatesList: [], + uniqueCertificateList: [], + approvedRootCertificates: undefined, + revokedRootCertificates: undefined, + approvedCertificatesBySubjectList: [], + rejectedCertificateList: [], + PkiRevocationDistributionPointList: [], + PkiRevocationDistributionPointsByIssuerSubjectKeyIDList: [], + approvedCertificatesBySubjectKeyIdList: [], + nocRootCertificatesList: [], + nocIcaCertificatesList: [], + revokedNocRootCertificatesList: [], + 
nocRootCertificatesByVidAndSkidList: [], + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.approvedCertificatesList) { + ApprovedCertificates.encode(v!, writer.uint32(10).fork()).ldelim(); + } + for (const v of message.proposedCertificateList) { + ProposedCertificate.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.childCertificatesList) { + ChildCertificates.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.proposedCertificateRevocationList) { + ProposedCertificateRevocation.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.revokedCertificatesList) { + RevokedCertificates.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const v of message.uniqueCertificateList) { + UniqueCertificate.encode(v!, writer.uint32(50).fork()).ldelim(); + } + if (message.approvedRootCertificates !== undefined) { + ApprovedRootCertificates.encode(message.approvedRootCertificates, writer.uint32(58).fork()).ldelim(); + } + if (message.revokedRootCertificates !== undefined) { + RevokedRootCertificates.encode(message.revokedRootCertificates, writer.uint32(66).fork()).ldelim(); + } + for (const v of message.approvedCertificatesBySubjectList) { + ApprovedCertificatesBySubject.encode(v!, writer.uint32(74).fork()).ldelim(); + } + for (const v of message.rejectedCertificateList) { + RejectedCertificate.encode(v!, writer.uint32(82).fork()).ldelim(); + } + for (const v of message.PkiRevocationDistributionPointList) { + PkiRevocationDistributionPoint.encode(v!, writer.uint32(90).fork()).ldelim(); + } + for (const v of message.PkiRevocationDistributionPointsByIssuerSubjectKeyIDList) { + PkiRevocationDistributionPointsByIssuerSubjectKeyID.encode(v!, writer.uint32(98).fork()).ldelim(); + } + for (const v of message.approvedCertificatesBySubjectKeyIdList) { + ApprovedCertificatesBySubjectKeyId.encode(v!, 
writer.uint32(106).fork()).ldelim(); + } + for (const v of message.nocRootCertificatesList) { + NocRootCertificates.encode(v!, writer.uint32(114).fork()).ldelim(); + } + for (const v of message.nocIcaCertificatesList) { + NocIcaCertificates.encode(v!, writer.uint32(122).fork()).ldelim(); + } + for (const v of message.revokedNocRootCertificatesList) { + RevokedNocRootCertificates.encode(v!, writer.uint32(130).fork()).ldelim(); + } + for (const v of message.nocRootCertificatesByVidAndSkidList) { + NocRootCertificatesByVidAndSkid.encode(v!, writer.uint32(138).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenesisState(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.approvedCertificatesList.push(ApprovedCertificates.decode(reader, reader.uint32())); + break; + case 2: + message.proposedCertificateList.push(ProposedCertificate.decode(reader, reader.uint32())); + break; + case 3: + message.childCertificatesList.push(ChildCertificates.decode(reader, reader.uint32())); + break; + case 4: + message.proposedCertificateRevocationList.push(ProposedCertificateRevocation.decode(reader, reader.uint32())); + break; + case 5: + message.revokedCertificatesList.push(RevokedCertificates.decode(reader, reader.uint32())); + break; + case 6: + message.uniqueCertificateList.push(UniqueCertificate.decode(reader, reader.uint32())); + break; + case 7: + message.approvedRootCertificates = ApprovedRootCertificates.decode(reader, reader.uint32()); + break; + case 8: + message.revokedRootCertificates = RevokedRootCertificates.decode(reader, reader.uint32()); + break; + case 9: + message.approvedCertificatesBySubjectList.push(ApprovedCertificatesBySubject.decode(reader, reader.uint32())); + 
break; + case 10: + message.rejectedCertificateList.push(RejectedCertificate.decode(reader, reader.uint32())); + break; + case 11: + message.PkiRevocationDistributionPointList.push( + PkiRevocationDistributionPoint.decode(reader, reader.uint32()), + ); + break; + case 12: + message.PkiRevocationDistributionPointsByIssuerSubjectKeyIDList.push( + PkiRevocationDistributionPointsByIssuerSubjectKeyID.decode(reader, reader.uint32()), + ); + break; + case 13: + message.approvedCertificatesBySubjectKeyIdList.push( + ApprovedCertificatesBySubjectKeyId.decode(reader, reader.uint32()), + ); + break; + case 14: + message.nocRootCertificatesList.push(NocRootCertificates.decode(reader, reader.uint32())); + break; + case 15: + message.nocIcaCertificatesList.push(NocIcaCertificates.decode(reader, reader.uint32())); + break; + case 16: + message.revokedNocRootCertificatesList.push(RevokedNocRootCertificates.decode(reader, reader.uint32())); + break; + case 17: + message.nocRootCertificatesByVidAndSkidList.push( + NocRootCertificatesByVidAndSkid.decode(reader, reader.uint32()), + ); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GenesisState { + return { + approvedCertificatesList: Array.isArray(object?.approvedCertificatesList) + ? object.approvedCertificatesList.map((e: any) => ApprovedCertificates.fromJSON(e)) + : [], + proposedCertificateList: Array.isArray(object?.proposedCertificateList) + ? object.proposedCertificateList.map((e: any) => ProposedCertificate.fromJSON(e)) + : [], + childCertificatesList: Array.isArray(object?.childCertificatesList) + ? object.childCertificatesList.map((e: any) => ChildCertificates.fromJSON(e)) + : [], + proposedCertificateRevocationList: Array.isArray(object?.proposedCertificateRevocationList) + ? 
object.proposedCertificateRevocationList.map((e: any) => ProposedCertificateRevocation.fromJSON(e)) + : [], + revokedCertificatesList: Array.isArray(object?.revokedCertificatesList) + ? object.revokedCertificatesList.map((e: any) => RevokedCertificates.fromJSON(e)) + : [], + uniqueCertificateList: Array.isArray(object?.uniqueCertificateList) + ? object.uniqueCertificateList.map((e: any) => UniqueCertificate.fromJSON(e)) + : [], + approvedRootCertificates: isSet(object.approvedRootCertificates) + ? ApprovedRootCertificates.fromJSON(object.approvedRootCertificates) + : undefined, + revokedRootCertificates: isSet(object.revokedRootCertificates) + ? RevokedRootCertificates.fromJSON(object.revokedRootCertificates) + : undefined, + approvedCertificatesBySubjectList: Array.isArray(object?.approvedCertificatesBySubjectList) + ? object.approvedCertificatesBySubjectList.map((e: any) => ApprovedCertificatesBySubject.fromJSON(e)) + : [], + rejectedCertificateList: Array.isArray(object?.rejectedCertificateList) + ? object.rejectedCertificateList.map((e: any) => RejectedCertificate.fromJSON(e)) + : [], + PkiRevocationDistributionPointList: Array.isArray(object?.PkiRevocationDistributionPointList) + ? object.PkiRevocationDistributionPointList.map((e: any) => PkiRevocationDistributionPoint.fromJSON(e)) + : [], + PkiRevocationDistributionPointsByIssuerSubjectKeyIDList: + Array.isArray(object?.PkiRevocationDistributionPointsByIssuerSubjectKeyIDList) + ? object.PkiRevocationDistributionPointsByIssuerSubjectKeyIDList.map((e: any) => + PkiRevocationDistributionPointsByIssuerSubjectKeyID.fromJSON(e) + ) + : [], + approvedCertificatesBySubjectKeyIdList: Array.isArray(object?.approvedCertificatesBySubjectKeyIdList) + ? object.approvedCertificatesBySubjectKeyIdList.map((e: any) => ApprovedCertificatesBySubjectKeyId.fromJSON(e)) + : [], + nocRootCertificatesList: Array.isArray(object?.nocRootCertificatesList) + ? 
object.nocRootCertificatesList.map((e: any) => NocRootCertificates.fromJSON(e)) + : [], + nocIcaCertificatesList: Array.isArray(object?.nocIcaCertificatesList) + ? object.nocIcaCertificatesList.map((e: any) => NocIcaCertificates.fromJSON(e)) + : [], + revokedNocRootCertificatesList: Array.isArray(object?.revokedNocRootCertificatesList) + ? object.revokedNocRootCertificatesList.map((e: any) => RevokedNocRootCertificates.fromJSON(e)) + : [], + nocRootCertificatesByVidAndSkidList: Array.isArray(object?.nocRootCertificatesByVidAndSkidList) + ? object.nocRootCertificatesByVidAndSkidList.map((e: any) => NocRootCertificatesByVidAndSkid.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GenesisState): unknown { + const obj: any = {}; + if (message.approvedCertificatesList) { + obj.approvedCertificatesList = message.approvedCertificatesList.map((e) => + e ? ApprovedCertificates.toJSON(e) : undefined + ); + } else { + obj.approvedCertificatesList = []; + } + if (message.proposedCertificateList) { + obj.proposedCertificateList = message.proposedCertificateList.map((e) => + e ? ProposedCertificate.toJSON(e) : undefined + ); + } else { + obj.proposedCertificateList = []; + } + if (message.childCertificatesList) { + obj.childCertificatesList = message.childCertificatesList.map((e) => e ? ChildCertificates.toJSON(e) : undefined); + } else { + obj.childCertificatesList = []; + } + if (message.proposedCertificateRevocationList) { + obj.proposedCertificateRevocationList = message.proposedCertificateRevocationList.map((e) => + e ? ProposedCertificateRevocation.toJSON(e) : undefined + ); + } else { + obj.proposedCertificateRevocationList = []; + } + if (message.revokedCertificatesList) { + obj.revokedCertificatesList = message.revokedCertificatesList.map((e) => + e ? RevokedCertificates.toJSON(e) : undefined + ); + } else { + obj.revokedCertificatesList = []; + } + if (message.uniqueCertificateList) { + obj.uniqueCertificateList = message.uniqueCertificateList.map((e) => e ? 
UniqueCertificate.toJSON(e) : undefined); + } else { + obj.uniqueCertificateList = []; + } + message.approvedRootCertificates !== undefined && (obj.approvedRootCertificates = message.approvedRootCertificates + ? ApprovedRootCertificates.toJSON(message.approvedRootCertificates) + : undefined); + message.revokedRootCertificates !== undefined && (obj.revokedRootCertificates = message.revokedRootCertificates + ? RevokedRootCertificates.toJSON(message.revokedRootCertificates) + : undefined); + if (message.approvedCertificatesBySubjectList) { + obj.approvedCertificatesBySubjectList = message.approvedCertificatesBySubjectList.map((e) => + e ? ApprovedCertificatesBySubject.toJSON(e) : undefined + ); + } else { + obj.approvedCertificatesBySubjectList = []; + } + if (message.rejectedCertificateList) { + obj.rejectedCertificateList = message.rejectedCertificateList.map((e) => + e ? RejectedCertificate.toJSON(e) : undefined + ); + } else { + obj.rejectedCertificateList = []; + } + if (message.PkiRevocationDistributionPointList) { + obj.PkiRevocationDistributionPointList = message.PkiRevocationDistributionPointList.map((e) => + e ? PkiRevocationDistributionPoint.toJSON(e) : undefined + ); + } else { + obj.PkiRevocationDistributionPointList = []; + } + if (message.PkiRevocationDistributionPointsByIssuerSubjectKeyIDList) { + obj.PkiRevocationDistributionPointsByIssuerSubjectKeyIDList = message + .PkiRevocationDistributionPointsByIssuerSubjectKeyIDList.map((e) => + e ? PkiRevocationDistributionPointsByIssuerSubjectKeyID.toJSON(e) : undefined + ); + } else { + obj.PkiRevocationDistributionPointsByIssuerSubjectKeyIDList = []; + } + if (message.approvedCertificatesBySubjectKeyIdList) { + obj.approvedCertificatesBySubjectKeyIdList = message.approvedCertificatesBySubjectKeyIdList.map((e) => + e ? 
ApprovedCertificatesBySubjectKeyId.toJSON(e) : undefined + ); + } else { + obj.approvedCertificatesBySubjectKeyIdList = []; + } + if (message.nocRootCertificatesList) { + obj.nocRootCertificatesList = message.nocRootCertificatesList.map((e) => + e ? NocRootCertificates.toJSON(e) : undefined + ); + } else { + obj.nocRootCertificatesList = []; + } + if (message.nocIcaCertificatesList) { + obj.nocIcaCertificatesList = message.nocIcaCertificatesList.map((e) => + e ? NocIcaCertificates.toJSON(e) : undefined + ); + } else { + obj.nocIcaCertificatesList = []; + } + if (message.revokedNocRootCertificatesList) { + obj.revokedNocRootCertificatesList = message.revokedNocRootCertificatesList.map((e) => + e ? RevokedNocRootCertificates.toJSON(e) : undefined + ); + } else { + obj.revokedNocRootCertificatesList = []; + } + if (message.nocRootCertificatesByVidAndSkidList) { + obj.nocRootCertificatesByVidAndSkidList = message.nocRootCertificatesByVidAndSkidList.map((e) => + e ? NocRootCertificatesByVidAndSkid.toJSON(e) : undefined + ); + } else { + obj.nocRootCertificatesByVidAndSkidList = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GenesisState { + const message = createBaseGenesisState(); + message.approvedCertificatesList = object.approvedCertificatesList?.map((e) => ApprovedCertificates.fromPartial(e)) + || []; + message.proposedCertificateList = object.proposedCertificateList?.map((e) => ProposedCertificate.fromPartial(e)) + || []; + message.childCertificatesList = object.childCertificatesList?.map((e) => ChildCertificates.fromPartial(e)) || []; + message.proposedCertificateRevocationList = + object.proposedCertificateRevocationList?.map((e) => ProposedCertificateRevocation.fromPartial(e)) || []; + message.revokedCertificatesList = object.revokedCertificatesList?.map((e) => RevokedCertificates.fromPartial(e)) + || []; + message.uniqueCertificateList = object.uniqueCertificateList?.map((e) => UniqueCertificate.fromPartial(e)) || []; + 
message.approvedRootCertificates = + (object.approvedRootCertificates !== undefined && object.approvedRootCertificates !== null) + ? ApprovedRootCertificates.fromPartial(object.approvedRootCertificates) + : undefined; + message.revokedRootCertificates = + (object.revokedRootCertificates !== undefined && object.revokedRootCertificates !== null) + ? RevokedRootCertificates.fromPartial(object.revokedRootCertificates) + : undefined; + message.approvedCertificatesBySubjectList = + object.approvedCertificatesBySubjectList?.map((e) => ApprovedCertificatesBySubject.fromPartial(e)) || []; + message.rejectedCertificateList = object.rejectedCertificateList?.map((e) => RejectedCertificate.fromPartial(e)) + || []; + message.PkiRevocationDistributionPointList = + object.PkiRevocationDistributionPointList?.map((e) => PkiRevocationDistributionPoint.fromPartial(e)) || []; + message.PkiRevocationDistributionPointsByIssuerSubjectKeyIDList = + object.PkiRevocationDistributionPointsByIssuerSubjectKeyIDList?.map((e) => + PkiRevocationDistributionPointsByIssuerSubjectKeyID.fromPartial(e) + ) || []; + message.approvedCertificatesBySubjectKeyIdList = + object.approvedCertificatesBySubjectKeyIdList?.map((e) => ApprovedCertificatesBySubjectKeyId.fromPartial(e)) + || []; + message.nocRootCertificatesList = object.nocRootCertificatesList?.map((e) => NocRootCertificates.fromPartial(e)) + || []; + message.nocIcaCertificatesList = object.nocIcaCertificatesList?.map((e) => NocIcaCertificates.fromPartial(e)) || []; + message.revokedNocRootCertificatesList = + object.revokedNocRootCertificatesList?.map((e) => RevokedNocRootCertificates.fromPartial(e)) || []; + message.nocRootCertificatesByVidAndSkidList = + object.nocRootCertificatesByVidAndSkidList?.map((e) => NocRootCertificatesByVidAndSkid.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? 
Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/grant.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/grant.ts new file mode 100644 index 000000000..935b67f32 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/grant.ts @@ -0,0 +1,125 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; + +export interface Grant { + address: string; + /** number of nanoseconds elapsed since January 1, 1970 UTC */ + time: number; + info: string; +} + +function createBaseGrant(): Grant { + return { address: "", time: 0, info: "" }; +} + +export const Grant = { + encode(message: Grant, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + if (message.time !== 0) { + writer.uint32(16).int64(message.time); + } + if (message.info !== "") { + writer.uint32(26).string(message.info); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Grant { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGrant(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + case 2: + message.time = longToNumber(reader.int64() as Long); + break; + case 3: + message.info = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Grant { + return { + address: isSet(object.address) ? String(object.address) : "", + time: isSet(object.time) ? Number(object.time) : 0, + info: isSet(object.info) ? String(object.info) : "", + }; + }, + + toJSON(message: Grant): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + message.time !== undefined && (obj.time = Math.round(message.time)); + message.info !== undefined && (obj.info = message.info); + return obj; + }, + + fromPartial, I>>(object: I): Grant { + const message = createBaseGrant(); + message.address = object.address ?? ""; + message.time = object.time ?? 0; + message.info = object.info ?? ""; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/noc_certificates.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/noc_certificates.ts new file mode 100644 index 000000000..199f4a0d4 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/noc_certificates.ts @@ -0,0 +1,87 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Certificate } from "./certificate"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; + +export interface NocCertificates { + vid: number; + certs: Certificate[]; +} + +function createBaseNocCertificates(): NocCertificates { + return { vid: 0, certs: [] }; +} + +export const NocCertificates = { + encode(message: NocCertificates, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + for (const v of message.certs) { + Certificate.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): NocCertificates { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseNocCertificates(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + case 2: + message.certs.push(Certificate.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): NocCertificates { + return { + vid: isSet(object.vid) ? Number(object.vid) : 0, + certs: Array.isArray(object?.certs) ? object.certs.map((e: any) => Certificate.fromJSON(e)) : [], + }; + }, + + toJSON(message: NocCertificates): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + if (message.certs) { + obj.certs = message.certs.map((e) => e ? Certificate.toJSON(e) : undefined); + } else { + obj.certs = []; + } + return obj; + }, + + fromPartial, I>>(object: I): NocCertificates { + const message = createBaseNocCertificates(); + message.vid = object.vid ?? 0; + message.certs = object.certs?.map((e) => Certificate.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/noc_ica_certificates.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/noc_ica_certificates.ts new file mode 100644 index 000000000..3707398f3 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/noc_ica_certificates.ts @@ -0,0 +1,87 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Certificate } from "./certificate"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; + +export interface NocIcaCertificates { + vid: number; + certs: Certificate[]; +} + +function createBaseNocIcaCertificates(): NocIcaCertificates { + return { vid: 0, certs: [] }; +} + +export const NocIcaCertificates = { + encode(message: NocIcaCertificates, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + for (const v of message.certs) { + Certificate.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): NocIcaCertificates { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseNocIcaCertificates(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + case 2: + message.certs.push(Certificate.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): NocIcaCertificates { + return { + vid: isSet(object.vid) ? Number(object.vid) : 0, + certs: Array.isArray(object?.certs) ? object.certs.map((e: any) => Certificate.fromJSON(e)) : [], + }; + }, + + toJSON(message: NocIcaCertificates): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + if (message.certs) { + obj.certs = message.certs.map((e) => e ? Certificate.toJSON(e) : undefined); + } else { + obj.certs = []; + } + return obj; + }, + + fromPartial, I>>(object: I): NocIcaCertificates { + const message = createBaseNocIcaCertificates(); + message.vid = object.vid ?? 0; + message.certs = object.certs?.map((e) => Certificate.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/noc_root_certificates.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/noc_root_certificates.ts new file mode 100644 index 000000000..312b77fdd --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/noc_root_certificates.ts @@ -0,0 +1,87 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Certificate } from "./certificate"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; + +export interface NocRootCertificates { + vid: number; + certs: Certificate[]; +} + +function createBaseNocRootCertificates(): NocRootCertificates { + return { vid: 0, certs: [] }; +} + +export const NocRootCertificates = { + encode(message: NocRootCertificates, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + for (const v of message.certs) { + Certificate.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): NocRootCertificates { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseNocRootCertificates(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + case 2: + message.certs.push(Certificate.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): NocRootCertificates { + return { + vid: isSet(object.vid) ? Number(object.vid) : 0, + certs: Array.isArray(object?.certs) ? object.certs.map((e: any) => Certificate.fromJSON(e)) : [], + }; + }, + + toJSON(message: NocRootCertificates): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + if (message.certs) { + obj.certs = message.certs.map((e) => e ? Certificate.toJSON(e) : undefined); + } else { + obj.certs = []; + } + return obj; + }, + + fromPartial, I>>(object: I): NocRootCertificates { + const message = createBaseNocRootCertificates(); + message.vid = object.vid ?? 0; + message.certs = object.certs?.map((e) => Certificate.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/noc_root_certificates_by_vid_and_skid.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/noc_root_certificates_by_vid_and_skid.ts new file mode 100644 index 000000000..9091591a7 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/noc_root_certificates_by_vid_and_skid.ts @@ -0,0 +1,109 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Certificate } from "./certificate"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; + +export interface NocRootCertificatesByVidAndSkid { + vid: number; + subjectKeyId: string; + certs: Certificate[]; + tq: number; +} + +function createBaseNocRootCertificatesByVidAndSkid(): NocRootCertificatesByVidAndSkid { + return { vid: 0, subjectKeyId: "", certs: [], tq: 0 }; +} + +export const NocRootCertificatesByVidAndSkid = { + encode(message: NocRootCertificatesByVidAndSkid, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + if (message.subjectKeyId !== "") { + writer.uint32(18).string(message.subjectKeyId); + } + for (const v of message.certs) { + Certificate.encode(v!, writer.uint32(26).fork()).ldelim(); + } + if (message.tq !== 0) { + writer.uint32(37).float(message.tq); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): NocRootCertificatesByVidAndSkid { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseNocRootCertificatesByVidAndSkid(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + case 2: + message.subjectKeyId = reader.string(); + break; + case 3: + message.certs.push(Certificate.decode(reader, reader.uint32())); + break; + case 4: + message.tq = reader.float(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): NocRootCertificatesByVidAndSkid { + return { + vid: isSet(object.vid) ? Number(object.vid) : 0, + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + certs: Array.isArray(object?.certs) ? object.certs.map((e: any) => Certificate.fromJSON(e)) : [], + tq: isSet(object.tq) ? Number(object.tq) : 0, + }; + }, + + toJSON(message: NocRootCertificatesByVidAndSkid): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + if (message.certs) { + obj.certs = message.certs.map((e) => e ? Certificate.toJSON(e) : undefined); + } else { + obj.certs = []; + } + message.tq !== undefined && (obj.tq = message.tq); + return obj; + }, + + fromPartial, I>>( + object: I, + ): NocRootCertificatesByVidAndSkid { + const message = createBaseNocRootCertificatesByVidAndSkid(); + message.vid = object.vid ?? 0; + message.subjectKeyId = object.subjectKeyId ?? ""; + message.certs = object.certs?.map((e) => Certificate.fromPartial(e)) || []; + message.tq = object.tq ?? 0; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? 
keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/pki_revocation_distribution_point.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/pki_revocation_distribution_point.ts new file mode 100644 index 000000000..db27f8354 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/pki_revocation_distribution_point.ts @@ -0,0 +1,240 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; + +export interface PkiRevocationDistributionPoint { + vid: number; + label: string; + issuerSubjectKeyID: string; + pid: number; + isPAA: boolean; + crlSignerCertificate: string; + dataURL: string; + dataFileSize: number; + dataDigest: string; + dataDigestType: number; + revocationType: number; + schemaVersion: number; + crlSignerDelegator: string; +} + +function createBasePkiRevocationDistributionPoint(): PkiRevocationDistributionPoint { + return { + vid: 0, + label: "", + issuerSubjectKeyID: "", + pid: 0, + isPAA: false, + crlSignerCertificate: "", + dataURL: "", + dataFileSize: 0, + dataDigest: "", + dataDigestType: 0, + revocationType: 0, + schemaVersion: 0, + crlSignerDelegator: "", + }; +} + +export const PkiRevocationDistributionPoint = { + encode(message: PkiRevocationDistributionPoint, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + if (message.label !== "") { + writer.uint32(18).string(message.label); + } + if (message.issuerSubjectKeyID !== "") { + 
writer.uint32(26).string(message.issuerSubjectKeyID); + } + if (message.pid !== 0) { + writer.uint32(32).int32(message.pid); + } + if (message.isPAA === true) { + writer.uint32(40).bool(message.isPAA); + } + if (message.crlSignerCertificate !== "") { + writer.uint32(50).string(message.crlSignerCertificate); + } + if (message.dataURL !== "") { + writer.uint32(58).string(message.dataURL); + } + if (message.dataFileSize !== 0) { + writer.uint32(64).uint64(message.dataFileSize); + } + if (message.dataDigest !== "") { + writer.uint32(74).string(message.dataDigest); + } + if (message.dataDigestType !== 0) { + writer.uint32(80).uint32(message.dataDigestType); + } + if (message.revocationType !== 0) { + writer.uint32(88).uint32(message.revocationType); + } + if (message.schemaVersion !== 0) { + writer.uint32(96).uint32(message.schemaVersion); + } + if (message.crlSignerDelegator !== "") { + writer.uint32(106).string(message.crlSignerDelegator); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PkiRevocationDistributionPoint { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePkiRevocationDistributionPoint(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + case 2: + message.label = reader.string(); + break; + case 3: + message.issuerSubjectKeyID = reader.string(); + break; + case 4: + message.pid = reader.int32(); + break; + case 5: + message.isPAA = reader.bool(); + break; + case 6: + message.crlSignerCertificate = reader.string(); + break; + case 7: + message.dataURL = reader.string(); + break; + case 8: + message.dataFileSize = longToNumber(reader.uint64() as Long); + break; + case 9: + message.dataDigest = reader.string(); + break; + case 10: + message.dataDigestType = reader.uint32(); + break; + case 11: + message.revocationType = reader.uint32(); + break; + case 12: + message.schemaVersion = reader.uint32(); + break; + case 13: + message.crlSignerDelegator = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PkiRevocationDistributionPoint { + return { + vid: isSet(object.vid) ? Number(object.vid) : 0, + label: isSet(object.label) ? String(object.label) : "", + issuerSubjectKeyID: isSet(object.issuerSubjectKeyID) ? String(object.issuerSubjectKeyID) : "", + pid: isSet(object.pid) ? Number(object.pid) : 0, + isPAA: isSet(object.isPAA) ? Boolean(object.isPAA) : false, + crlSignerCertificate: isSet(object.crlSignerCertificate) ? String(object.crlSignerCertificate) : "", + dataURL: isSet(object.dataURL) ? String(object.dataURL) : "", + dataFileSize: isSet(object.dataFileSize) ? Number(object.dataFileSize) : 0, + dataDigest: isSet(object.dataDigest) ? String(object.dataDigest) : "", + dataDigestType: isSet(object.dataDigestType) ? Number(object.dataDigestType) : 0, + revocationType: isSet(object.revocationType) ? Number(object.revocationType) : 0, + schemaVersion: isSet(object.schemaVersion) ? 
Number(object.schemaVersion) : 0, + crlSignerDelegator: isSet(object.crlSignerDelegator) ? String(object.crlSignerDelegator) : "", + }; + }, + + toJSON(message: PkiRevocationDistributionPoint): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.label !== undefined && (obj.label = message.label); + message.issuerSubjectKeyID !== undefined && (obj.issuerSubjectKeyID = message.issuerSubjectKeyID); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.isPAA !== undefined && (obj.isPAA = message.isPAA); + message.crlSignerCertificate !== undefined && (obj.crlSignerCertificate = message.crlSignerCertificate); + message.dataURL !== undefined && (obj.dataURL = message.dataURL); + message.dataFileSize !== undefined && (obj.dataFileSize = Math.round(message.dataFileSize)); + message.dataDigest !== undefined && (obj.dataDigest = message.dataDigest); + message.dataDigestType !== undefined && (obj.dataDigestType = Math.round(message.dataDigestType)); + message.revocationType !== undefined && (obj.revocationType = Math.round(message.revocationType)); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + message.crlSignerDelegator !== undefined && (obj.crlSignerDelegator = message.crlSignerDelegator); + return obj; + }, + + fromPartial, I>>( + object: I, + ): PkiRevocationDistributionPoint { + const message = createBasePkiRevocationDistributionPoint(); + message.vid = object.vid ?? 0; + message.label = object.label ?? ""; + message.issuerSubjectKeyID = object.issuerSubjectKeyID ?? ""; + message.pid = object.pid ?? 0; + message.isPAA = object.isPAA ?? false; + message.crlSignerCertificate = object.crlSignerCertificate ?? ""; + message.dataURL = object.dataURL ?? ""; + message.dataFileSize = object.dataFileSize ?? 0; + message.dataDigest = object.dataDigest ?? ""; + message.dataDigestType = object.dataDigestType ?? 
0; + message.revocationType = object.revocationType ?? 0; + message.schemaVersion = object.schemaVersion ?? 0; + message.crlSignerDelegator = object.crlSignerDelegator ?? ""; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/pki_revocation_distribution_points_by_issuer_subject_key_id.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/pki_revocation_distribution_points_by_issuer_subject_key_id.ts new file mode 100644 index 000000000..c98083546 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/pki_revocation_distribution_points_by_issuer_subject_key_id.ts @@ -0,0 +1,94 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { PkiRevocationDistributionPoint } from "./pki_revocation_distribution_point"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; + +export interface PkiRevocationDistributionPointsByIssuerSubjectKeyID { + issuerSubjectKeyID: string; + points: PkiRevocationDistributionPoint[]; +} + +function createBasePkiRevocationDistributionPointsByIssuerSubjectKeyID(): PkiRevocationDistributionPointsByIssuerSubjectKeyID { + return { issuerSubjectKeyID: "", points: [] }; +} + +export const PkiRevocationDistributionPointsByIssuerSubjectKeyID = { + encode( + message: PkiRevocationDistributionPointsByIssuerSubjectKeyID, + writer: _m0.Writer = _m0.Writer.create(), + ): _m0.Writer { + if (message.issuerSubjectKeyID !== "") { + writer.uint32(10).string(message.issuerSubjectKeyID); + } + for (const v of message.points) { + 
PkiRevocationDistributionPoint.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PkiRevocationDistributionPointsByIssuerSubjectKeyID { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePkiRevocationDistributionPointsByIssuerSubjectKeyID(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.issuerSubjectKeyID = reader.string(); + break; + case 2: + message.points.push(PkiRevocationDistributionPoint.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PkiRevocationDistributionPointsByIssuerSubjectKeyID { + return { + issuerSubjectKeyID: isSet(object.issuerSubjectKeyID) ? String(object.issuerSubjectKeyID) : "", + points: Array.isArray(object?.points) + ? object.points.map((e: any) => PkiRevocationDistributionPoint.fromJSON(e)) + : [], + }; + }, + + toJSON(message: PkiRevocationDistributionPointsByIssuerSubjectKeyID): unknown { + const obj: any = {}; + message.issuerSubjectKeyID !== undefined && (obj.issuerSubjectKeyID = message.issuerSubjectKeyID); + if (message.points) { + obj.points = message.points.map((e) => e ? PkiRevocationDistributionPoint.toJSON(e) : undefined); + } else { + obj.points = []; + } + return obj; + }, + + fromPartial, I>>( + object: I, + ): PkiRevocationDistributionPointsByIssuerSubjectKeyID { + const message = createBasePkiRevocationDistributionPointsByIssuerSubjectKeyID(); + message.issuerSubjectKeyID = object.issuerSubjectKeyID ?? ""; + message.points = object.points?.map((e) => PkiRevocationDistributionPoint.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? 
T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/proposed_certificate.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/proposed_certificate.ts new file mode 100644 index 000000000..e2d4dfc84 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/proposed_certificate.ts @@ -0,0 +1,193 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Grant } from "./grant"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; + +export interface ProposedCertificate { + subject: string; + subjectKeyId: string; + pemCert: string; + serialNumber: string; + owner: string; + approvals: Grant[]; + subjectAsText: string; + rejects: Grant[]; + vid: number; + certSchemaVersion: number; + schemaVersion: number; +} + +function createBaseProposedCertificate(): ProposedCertificate { + return { + subject: "", + subjectKeyId: "", + pemCert: "", + serialNumber: "", + owner: "", + approvals: [], + subjectAsText: "", + rejects: [], + vid: 0, + certSchemaVersion: 0, + schemaVersion: 0, + }; +} + +export const ProposedCertificate = { + encode(message: ProposedCertificate, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subject !== "") { + writer.uint32(10).string(message.subject); + } + if (message.subjectKeyId !== "") { + writer.uint32(18).string(message.subjectKeyId); + } + if (message.pemCert !== "") { + 
writer.uint32(26).string(message.pemCert); + } + if (message.serialNumber !== "") { + writer.uint32(34).string(message.serialNumber); + } + if (message.owner !== "") { + writer.uint32(42).string(message.owner); + } + for (const v of message.approvals) { + Grant.encode(v!, writer.uint32(50).fork()).ldelim(); + } + if (message.subjectAsText !== "") { + writer.uint32(58).string(message.subjectAsText); + } + for (const v of message.rejects) { + Grant.encode(v!, writer.uint32(66).fork()).ldelim(); + } + if (message.vid !== 0) { + writer.uint32(72).int32(message.vid); + } + if (message.certSchemaVersion !== 0) { + writer.uint32(80).uint32(message.certSchemaVersion); + } + if (message.schemaVersion !== 0) { + writer.uint32(88).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ProposedCertificate { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseProposedCertificate(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.subject = reader.string(); + break; + case 2: + message.subjectKeyId = reader.string(); + break; + case 3: + message.pemCert = reader.string(); + break; + case 4: + message.serialNumber = reader.string(); + break; + case 5: + message.owner = reader.string(); + break; + case 6: + message.approvals.push(Grant.decode(reader, reader.uint32())); + break; + case 7: + message.subjectAsText = reader.string(); + break; + case 8: + message.rejects.push(Grant.decode(reader, reader.uint32())); + break; + case 9: + message.vid = reader.int32(); + break; + case 10: + message.certSchemaVersion = reader.uint32(); + break; + case 11: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ProposedCertificate { + return { + subject: 
isSet(object.subject) ? String(object.subject) : "", + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + pemCert: isSet(object.pemCert) ? String(object.pemCert) : "", + serialNumber: isSet(object.serialNumber) ? String(object.serialNumber) : "", + owner: isSet(object.owner) ? String(object.owner) : "", + approvals: Array.isArray(object?.approvals) ? object.approvals.map((e: any) => Grant.fromJSON(e)) : [], + subjectAsText: isSet(object.subjectAsText) ? String(object.subjectAsText) : "", + rejects: Array.isArray(object?.rejects) ? object.rejects.map((e: any) => Grant.fromJSON(e)) : [], + vid: isSet(object.vid) ? Number(object.vid) : 0, + certSchemaVersion: isSet(object.certSchemaVersion) ? Number(object.certSchemaVersion) : 0, + schemaVersion: isSet(object.schemaVersion) ? Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: ProposedCertificate): unknown { + const obj: any = {}; + message.subject !== undefined && (obj.subject = message.subject); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + message.pemCert !== undefined && (obj.pemCert = message.pemCert); + message.serialNumber !== undefined && (obj.serialNumber = message.serialNumber); + message.owner !== undefined && (obj.owner = message.owner); + if (message.approvals) { + obj.approvals = message.approvals.map((e) => e ? Grant.toJSON(e) : undefined); + } else { + obj.approvals = []; + } + message.subjectAsText !== undefined && (obj.subjectAsText = message.subjectAsText); + if (message.rejects) { + obj.rejects = message.rejects.map((e) => e ? 
Grant.toJSON(e) : undefined); + } else { + obj.rejects = []; + } + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.certSchemaVersion !== undefined && (obj.certSchemaVersion = Math.round(message.certSchemaVersion)); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): ProposedCertificate { + const message = createBaseProposedCertificate(); + message.subject = object.subject ?? ""; + message.subjectKeyId = object.subjectKeyId ?? ""; + message.pemCert = object.pemCert ?? ""; + message.serialNumber = object.serialNumber ?? ""; + message.owner = object.owner ?? ""; + message.approvals = object.approvals?.map((e) => Grant.fromPartial(e)) || []; + message.subjectAsText = object.subjectAsText ?? ""; + message.rejects = object.rejects?.map((e) => Grant.fromPartial(e)) || []; + message.vid = object.vid ?? 0; + message.certSchemaVersion = object.certSchemaVersion ?? 0; + message.schemaVersion = object.schemaVersion ?? 0; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/proposed_certificate_revocation.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/proposed_certificate_revocation.ts new file mode 100644 index 000000000..96656e36a --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/proposed_certificate_revocation.ts @@ -0,0 +1,147 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Grant } from "./grant"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; + +export interface ProposedCertificateRevocation { + subject: string; + subjectKeyId: string; + approvals: Grant[]; + subjectAsText: string; + serialNumber: string; + revokeChild: boolean; + schemaVersion: number; +} + +function createBaseProposedCertificateRevocation(): ProposedCertificateRevocation { + return { + subject: "", + subjectKeyId: "", + approvals: [], + subjectAsText: "", + serialNumber: "", + revokeChild: false, + schemaVersion: 0, + }; +} + +export const ProposedCertificateRevocation = { + encode(message: ProposedCertificateRevocation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subject !== "") { + writer.uint32(10).string(message.subject); + } + if (message.subjectKeyId !== "") { + writer.uint32(18).string(message.subjectKeyId); + } + for (const v of message.approvals) { + Grant.encode(v!, writer.uint32(26).fork()).ldelim(); + } + if (message.subjectAsText !== "") { + writer.uint32(34).string(message.subjectAsText); + } + if (message.serialNumber !== "") { + writer.uint32(42).string(message.serialNumber); + } + if (message.revokeChild === true) { + 
writer.uint32(48).bool(message.revokeChild); + } + if (message.schemaVersion !== 0) { + writer.uint32(56).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ProposedCertificateRevocation { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseProposedCertificateRevocation(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.subject = reader.string(); + break; + case 2: + message.subjectKeyId = reader.string(); + break; + case 3: + message.approvals.push(Grant.decode(reader, reader.uint32())); + break; + case 4: + message.subjectAsText = reader.string(); + break; + case 5: + message.serialNumber = reader.string(); + break; + case 6: + message.revokeChild = reader.bool(); + break; + case 7: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ProposedCertificateRevocation { + return { + subject: isSet(object.subject) ? String(object.subject) : "", + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + approvals: Array.isArray(object?.approvals) ? object.approvals.map((e: any) => Grant.fromJSON(e)) : [], + subjectAsText: isSet(object.subjectAsText) ? String(object.subjectAsText) : "", + serialNumber: isSet(object.serialNumber) ? String(object.serialNumber) : "", + revokeChild: isSet(object.revokeChild) ? Boolean(object.revokeChild) : false, + schemaVersion: isSet(object.schemaVersion) ? 
Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: ProposedCertificateRevocation): unknown { + const obj: any = {}; + message.subject !== undefined && (obj.subject = message.subject); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + if (message.approvals) { + obj.approvals = message.approvals.map((e) => e ? Grant.toJSON(e) : undefined); + } else { + obj.approvals = []; + } + message.subjectAsText !== undefined && (obj.subjectAsText = message.subjectAsText); + message.serialNumber !== undefined && (obj.serialNumber = message.serialNumber); + message.revokeChild !== undefined && (obj.revokeChild = message.revokeChild); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): ProposedCertificateRevocation { + const message = createBaseProposedCertificateRevocation(); + message.subject = object.subject ?? ""; + message.subjectKeyId = object.subjectKeyId ?? ""; + message.approvals = object.approvals?.map((e) => Grant.fromPartial(e)) || []; + message.subjectAsText = object.subjectAsText ?? ""; + message.serialNumber = object.serialNumber ?? ""; + message.revokeChild = object.revokeChild ?? false; + message.schemaVersion = object.schemaVersion ?? 0; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/query.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/query.ts new file mode 100644 index 000000000..9471c3597 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/query.ts @@ -0,0 +1,3469 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { PageRequest, PageResponse } from "../../../cosmos/base/query/v1beta1/pagination"; +import { ApprovedCertificates } from "./approved_certificates"; +import { ApprovedCertificatesBySubject } from "./approved_certificates_by_subject"; +import { ApprovedRootCertificates } from "./approved_root_certificates"; +import { ChildCertificates } from "./child_certificates"; +import { NocIcaCertificates } from "./noc_ica_certificates"; +import { NocRootCertificates } from "./noc_root_certificates"; +import { NocRootCertificatesByVidAndSkid } from "./noc_root_certificates_by_vid_and_skid"; +import { PkiRevocationDistributionPoint } from "./pki_revocation_distribution_point"; +import { PkiRevocationDistributionPointsByIssuerSubjectKeyID } from "./pki_revocation_distribution_points_by_issuer_subject_key_id"; +import { ProposedCertificate } from "./proposed_certificate"; +import { ProposedCertificateRevocation } from "./proposed_certificate_revocation"; +import { RejectedCertificate } from "./rejected_certificate"; +import { RevokedCertificates } from "./revoked_certificates"; +import { RevokedNocRootCertificates } from "./revoked_noc_root_certificates"; +import { RevokedRootCertificates } from "./revoked_root_certificates"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; 
+ +export interface QueryGetApprovedCertificatesRequest { + subject: string; + subjectKeyId: string; +} + +export interface QueryGetApprovedCertificatesResponse { + approvedCertificates: ApprovedCertificates | undefined; +} + +export interface QueryAllApprovedCertificatesRequest { + pagination: PageRequest | undefined; + subjectKeyId: string; +} + +export interface QueryAllApprovedCertificatesResponse { + approvedCertificates: ApprovedCertificates[]; + pagination: PageResponse | undefined; +} + +export interface QueryGetProposedCertificateRequest { + subject: string; + subjectKeyId: string; +} + +export interface QueryGetProposedCertificateResponse { + proposedCertificate: ProposedCertificate | undefined; +} + +export interface QueryAllProposedCertificateRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllProposedCertificateResponse { + proposedCertificate: ProposedCertificate[]; + pagination: PageResponse | undefined; +} + +export interface QueryGetChildCertificatesRequest { + issuer: string; + authorityKeyId: string; +} + +export interface QueryGetChildCertificatesResponse { + childCertificates: ChildCertificates | undefined; +} + +export interface QueryGetProposedCertificateRevocationRequest { + subject: string; + subjectKeyId: string; + serialNumber: string; +} + +export interface QueryGetProposedCertificateRevocationResponse { + proposedCertificateRevocation: ProposedCertificateRevocation | undefined; +} + +export interface QueryAllProposedCertificateRevocationRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllProposedCertificateRevocationResponse { + proposedCertificateRevocation: ProposedCertificateRevocation[]; + pagination: PageResponse | undefined; +} + +export interface QueryGetRevokedCertificatesRequest { + subject: string; + subjectKeyId: string; +} + +export interface QueryGetRevokedCertificatesResponse { + revokedCertificates: RevokedCertificates | undefined; +} + +export interface 
QueryAllRevokedCertificatesRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllRevokedCertificatesResponse { + revokedCertificates: RevokedCertificates[]; + pagination: PageResponse | undefined; +} + +export interface QueryGetApprovedRootCertificatesRequest { +} + +export interface QueryGetApprovedRootCertificatesResponse { + approvedRootCertificates: ApprovedRootCertificates | undefined; +} + +export interface QueryGetRevokedRootCertificatesRequest { +} + +export interface QueryGetRevokedRootCertificatesResponse { + revokedRootCertificates: RevokedRootCertificates | undefined; +} + +export interface QueryGetApprovedCertificatesBySubjectRequest { + subject: string; +} + +export interface QueryGetApprovedCertificatesBySubjectResponse { + approvedCertificatesBySubject: ApprovedCertificatesBySubject | undefined; +} + +export interface QueryGetRejectedCertificatesRequest { + subject: string; + subjectKeyId: string; +} + +export interface QueryGetRejectedCertificatesResponse { + rejectedCertificate: RejectedCertificate | undefined; +} + +export interface QueryAllRejectedCertificatesRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllRejectedCertificatesResponse { + rejectedCertificate: RejectedCertificate[]; + pagination: PageResponse | undefined; +} + +export interface QueryGetPkiRevocationDistributionPointRequest { + vid: number; + label: string; + issuerSubjectKeyID: string; +} + +export interface QueryGetPkiRevocationDistributionPointResponse { + PkiRevocationDistributionPoint: PkiRevocationDistributionPoint | undefined; +} + +export interface QueryAllPkiRevocationDistributionPointRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllPkiRevocationDistributionPointResponse { + PkiRevocationDistributionPoint: PkiRevocationDistributionPoint[]; + pagination: PageResponse | undefined; +} + +export interface QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDRequest { + 
issuerSubjectKeyID: string; +} + +export interface QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDResponse { + pkiRevocationDistributionPointsByIssuerSubjectKeyID: PkiRevocationDistributionPointsByIssuerSubjectKeyID | undefined; +} + +export interface QueryGetNocRootCertificatesRequest { + vid: number; +} + +export interface QueryGetNocRootCertificatesResponse { + nocRootCertificates: NocRootCertificates | undefined; +} + +export interface QueryAllNocRootCertificatesRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllNocRootCertificatesResponse { + nocRootCertificates: NocRootCertificates[]; + pagination: PageResponse | undefined; +} + +export interface QueryGetNocIcaCertificatesRequest { + vid: number; +} + +export interface QueryGetNocIcaCertificatesResponse { + nocIcaCertificates: NocIcaCertificates | undefined; +} + +export interface QueryAllNocIcaCertificatesRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllNocIcaCertificatesResponse { + nocIcaCertificates: NocIcaCertificates[]; + pagination: PageResponse | undefined; +} + +export interface QueryGetRevokedNocRootCertificatesRequest { + subject: string; + subjectKeyId: string; +} + +export interface QueryGetRevokedNocRootCertificatesResponse { + revokedNocRootCertificates: RevokedNocRootCertificates | undefined; +} + +export interface QueryAllRevokedNocRootCertificatesRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllRevokedNocRootCertificatesResponse { + revokedNocRootCertificates: RevokedNocRootCertificates[]; + pagination: PageResponse | undefined; +} + +export interface QueryGetNocRootCertificatesByVidAndSkidRequest { + vid: number; + subjectKeyId: string; +} + +export interface QueryGetNocRootCertificatesByVidAndSkidResponse { + nocRootCertificatesByVidAndSkid: NocRootCertificatesByVidAndSkid | undefined; +} + +function createBaseQueryGetApprovedCertificatesRequest(): QueryGetApprovedCertificatesRequest { + 
return { subject: "", subjectKeyId: "" }; +} + +export const QueryGetApprovedCertificatesRequest = { + encode(message: QueryGetApprovedCertificatesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subject !== "") { + writer.uint32(10).string(message.subject); + } + if (message.subjectKeyId !== "") { + writer.uint32(18).string(message.subjectKeyId); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetApprovedCertificatesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetApprovedCertificatesRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.subject = reader.string(); + break; + case 2: + message.subjectKeyId = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetApprovedCertificatesRequest { + return { + subject: isSet(object.subject) ? String(object.subject) : "", + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + }; + }, + + toJSON(message: QueryGetApprovedCertificatesRequest): unknown { + const obj: any = {}; + message.subject !== undefined && (obj.subject = message.subject); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetApprovedCertificatesRequest { + const message = createBaseQueryGetApprovedCertificatesRequest(); + message.subject = object.subject ?? ""; + message.subjectKeyId = object.subjectKeyId ?? 
""; + return message; + }, +}; + +function createBaseQueryGetApprovedCertificatesResponse(): QueryGetApprovedCertificatesResponse { + return { approvedCertificates: undefined }; +} + +export const QueryGetApprovedCertificatesResponse = { + encode(message: QueryGetApprovedCertificatesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.approvedCertificates !== undefined) { + ApprovedCertificates.encode(message.approvedCertificates, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetApprovedCertificatesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetApprovedCertificatesResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.approvedCertificates = ApprovedCertificates.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetApprovedCertificatesResponse { + return { + approvedCertificates: isSet(object.approvedCertificates) + ? ApprovedCertificates.fromJSON(object.approvedCertificates) + : undefined, + }; + }, + + toJSON(message: QueryGetApprovedCertificatesResponse): unknown { + const obj: any = {}; + message.approvedCertificates !== undefined && (obj.approvedCertificates = message.approvedCertificates + ? ApprovedCertificates.toJSON(message.approvedCertificates) + : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetApprovedCertificatesResponse { + const message = createBaseQueryGetApprovedCertificatesResponse(); + message.approvedCertificates = (object.approvedCertificates !== undefined && object.approvedCertificates !== null) + ? 
ApprovedCertificates.fromPartial(object.approvedCertificates) + : undefined; + return message; + }, +}; + +function createBaseQueryAllApprovedCertificatesRequest(): QueryAllApprovedCertificatesRequest { + return { pagination: undefined, subjectKeyId: "" }; +} + +export const QueryAllApprovedCertificatesRequest = { + encode(message: QueryAllApprovedCertificatesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + if (message.subjectKeyId !== "") { + writer.uint32(18).string(message.subjectKeyId); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllApprovedCertificatesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllApprovedCertificatesRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + case 2: + message.subjectKeyId = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllApprovedCertificatesRequest { + return { + pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined, + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + }; + }, + + toJSON(message: QueryAllApprovedCertificatesRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? 
PageRequest.toJSON(message.pagination) : undefined); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllApprovedCertificatesRequest { + const message = createBaseQueryAllApprovedCertificatesRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageRequest.fromPartial(object.pagination) + : undefined; + message.subjectKeyId = object.subjectKeyId ?? ""; + return message; + }, +}; + +function createBaseQueryAllApprovedCertificatesResponse(): QueryAllApprovedCertificatesResponse { + return { approvedCertificates: [], pagination: undefined }; +} + +export const QueryAllApprovedCertificatesResponse = { + encode(message: QueryAllApprovedCertificatesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.approvedCertificates) { + ApprovedCertificates.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllApprovedCertificatesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllApprovedCertificatesResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.approvedCertificates.push(ApprovedCertificates.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllApprovedCertificatesResponse { + return { + approvedCertificates: Array.isArray(object?.approvedCertificates) + ? 
object.approvedCertificates.map((e: any) => ApprovedCertificates.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllApprovedCertificatesResponse): unknown { + const obj: any = {}; + if (message.approvedCertificates) { + obj.approvedCertificates = message.approvedCertificates.map((e) => + e ? ApprovedCertificates.toJSON(e) : undefined + ); + } else { + obj.approvedCertificates = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllApprovedCertificatesResponse { + const message = createBaseQueryAllApprovedCertificatesResponse(); + message.approvedCertificates = object.approvedCertificates?.map((e) => ApprovedCertificates.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetProposedCertificateRequest(): QueryGetProposedCertificateRequest { + return { subject: "", subjectKeyId: "" }; +} + +export const QueryGetProposedCertificateRequest = { + encode(message: QueryGetProposedCertificateRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subject !== "") { + writer.uint32(10).string(message.subject); + } + if (message.subjectKeyId !== "") { + writer.uint32(18).string(message.subjectKeyId); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetProposedCertificateRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGetProposedCertificateRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.subject = reader.string(); + break; + case 2: + message.subjectKeyId = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetProposedCertificateRequest { + return { + subject: isSet(object.subject) ? String(object.subject) : "", + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + }; + }, + + toJSON(message: QueryGetProposedCertificateRequest): unknown { + const obj: any = {}; + message.subject !== undefined && (obj.subject = message.subject); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetProposedCertificateRequest { + const message = createBaseQueryGetProposedCertificateRequest(); + message.subject = object.subject ?? ""; + message.subjectKeyId = object.subjectKeyId ?? ""; + return message; + }, +}; + +function createBaseQueryGetProposedCertificateResponse(): QueryGetProposedCertificateResponse { + return { proposedCertificate: undefined }; +} + +export const QueryGetProposedCertificateResponse = { + encode(message: QueryGetProposedCertificateResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.proposedCertificate !== undefined) { + ProposedCertificate.encode(message.proposedCertificate, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetProposedCertificateResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGetProposedCertificateResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.proposedCertificate = ProposedCertificate.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetProposedCertificateResponse { + return { + proposedCertificate: isSet(object.proposedCertificate) + ? ProposedCertificate.fromJSON(object.proposedCertificate) + : undefined, + }; + }, + + toJSON(message: QueryGetProposedCertificateResponse): unknown { + const obj: any = {}; + message.proposedCertificate !== undefined && (obj.proposedCertificate = message.proposedCertificate + ? ProposedCertificate.toJSON(message.proposedCertificate) + : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetProposedCertificateResponse { + const message = createBaseQueryGetProposedCertificateResponse(); + message.proposedCertificate = (object.proposedCertificate !== undefined && object.proposedCertificate !== null) + ? ProposedCertificate.fromPartial(object.proposedCertificate) + : undefined; + return message; + }, +}; + +function createBaseQueryAllProposedCertificateRequest(): QueryAllProposedCertificateRequest { + return { pagination: undefined }; +} + +export const QueryAllProposedCertificateRequest = { + encode(message: QueryAllProposedCertificateRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllProposedCertificateRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAllProposedCertificateRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllProposedCertificateRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllProposedCertificateRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllProposedCertificateRequest { + const message = createBaseQueryAllProposedCertificateRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllProposedCertificateResponse(): QueryAllProposedCertificateResponse { + return { proposedCertificate: [], pagination: undefined }; +} + +export const QueryAllProposedCertificateResponse = { + encode(message: QueryAllProposedCertificateResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.proposedCertificate) { + ProposedCertificate.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllProposedCertificateResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAllProposedCertificateResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.proposedCertificate.push(ProposedCertificate.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllProposedCertificateResponse { + return { + proposedCertificate: Array.isArray(object?.proposedCertificate) + ? object.proposedCertificate.map((e: any) => ProposedCertificate.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllProposedCertificateResponse): unknown { + const obj: any = {}; + if (message.proposedCertificate) { + obj.proposedCertificate = message.proposedCertificate.map((e) => e ? ProposedCertificate.toJSON(e) : undefined); + } else { + obj.proposedCertificate = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllProposedCertificateResponse { + const message = createBaseQueryAllProposedCertificateResponse(); + message.proposedCertificate = object.proposedCertificate?.map((e) => ProposedCertificate.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? 
PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetChildCertificatesRequest(): QueryGetChildCertificatesRequest { + return { issuer: "", authorityKeyId: "" }; +} + +export const QueryGetChildCertificatesRequest = { + encode(message: QueryGetChildCertificatesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.issuer !== "") { + writer.uint32(10).string(message.issuer); + } + if (message.authorityKeyId !== "") { + writer.uint32(18).string(message.authorityKeyId); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetChildCertificatesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetChildCertificatesRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.issuer = reader.string(); + break; + case 2: + message.authorityKeyId = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetChildCertificatesRequest { + return { + issuer: isSet(object.issuer) ? String(object.issuer) : "", + authorityKeyId: isSet(object.authorityKeyId) ? String(object.authorityKeyId) : "", + }; + }, + + toJSON(message: QueryGetChildCertificatesRequest): unknown { + const obj: any = {}; + message.issuer !== undefined && (obj.issuer = message.issuer); + message.authorityKeyId !== undefined && (obj.authorityKeyId = message.authorityKeyId); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetChildCertificatesRequest { + const message = createBaseQueryGetChildCertificatesRequest(); + message.issuer = object.issuer ?? ""; + message.authorityKeyId = object.authorityKeyId ?? 
""; + return message; + }, +}; + +function createBaseQueryGetChildCertificatesResponse(): QueryGetChildCertificatesResponse { + return { childCertificates: undefined }; +} + +export const QueryGetChildCertificatesResponse = { + encode(message: QueryGetChildCertificatesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.childCertificates !== undefined) { + ChildCertificates.encode(message.childCertificates, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetChildCertificatesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetChildCertificatesResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.childCertificates = ChildCertificates.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetChildCertificatesResponse { + return { + childCertificates: isSet(object.childCertificates) + ? ChildCertificates.fromJSON(object.childCertificates) + : undefined, + }; + }, + + toJSON(message: QueryGetChildCertificatesResponse): unknown { + const obj: any = {}; + message.childCertificates !== undefined && (obj.childCertificates = message.childCertificates + ? ChildCertificates.toJSON(message.childCertificates) + : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetChildCertificatesResponse { + const message = createBaseQueryGetChildCertificatesResponse(); + message.childCertificates = (object.childCertificates !== undefined && object.childCertificates !== null) + ? 
ChildCertificates.fromPartial(object.childCertificates) + : undefined; + return message; + }, +}; + +function createBaseQueryGetProposedCertificateRevocationRequest(): QueryGetProposedCertificateRevocationRequest { + return { subject: "", subjectKeyId: "", serialNumber: "" }; +} + +export const QueryGetProposedCertificateRevocationRequest = { + encode(message: QueryGetProposedCertificateRevocationRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subject !== "") { + writer.uint32(10).string(message.subject); + } + if (message.subjectKeyId !== "") { + writer.uint32(18).string(message.subjectKeyId); + } + if (message.serialNumber !== "") { + writer.uint32(26).string(message.serialNumber); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetProposedCertificateRevocationRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetProposedCertificateRevocationRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.subject = reader.string(); + break; + case 2: + message.subjectKeyId = reader.string(); + break; + case 3: + message.serialNumber = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetProposedCertificateRevocationRequest { + return { + subject: isSet(object.subject) ? String(object.subject) : "", + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + serialNumber: isSet(object.serialNumber) ? 
String(object.serialNumber) : "", + }; + }, + + toJSON(message: QueryGetProposedCertificateRevocationRequest): unknown { + const obj: any = {}; + message.subject !== undefined && (obj.subject = message.subject); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + message.serialNumber !== undefined && (obj.serialNumber = message.serialNumber); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetProposedCertificateRevocationRequest { + const message = createBaseQueryGetProposedCertificateRevocationRequest(); + message.subject = object.subject ?? ""; + message.subjectKeyId = object.subjectKeyId ?? ""; + message.serialNumber = object.serialNumber ?? ""; + return message; + }, +}; + +function createBaseQueryGetProposedCertificateRevocationResponse(): QueryGetProposedCertificateRevocationResponse { + return { proposedCertificateRevocation: undefined }; +} + +export const QueryGetProposedCertificateRevocationResponse = { + encode(message: QueryGetProposedCertificateRevocationResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.proposedCertificateRevocation !== undefined) { + ProposedCertificateRevocation.encode(message.proposedCertificateRevocation, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetProposedCertificateRevocationResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGetProposedCertificateRevocationResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.proposedCertificateRevocation = ProposedCertificateRevocation.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetProposedCertificateRevocationResponse { + return { + proposedCertificateRevocation: isSet(object.proposedCertificateRevocation) + ? ProposedCertificateRevocation.fromJSON(object.proposedCertificateRevocation) + : undefined, + }; + }, + + toJSON(message: QueryGetProposedCertificateRevocationResponse): unknown { + const obj: any = {}; + message.proposedCertificateRevocation !== undefined + && (obj.proposedCertificateRevocation = message.proposedCertificateRevocation + ? ProposedCertificateRevocation.toJSON(message.proposedCertificateRevocation) + : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetProposedCertificateRevocationResponse { + const message = createBaseQueryGetProposedCertificateRevocationResponse(); + message.proposedCertificateRevocation = + (object.proposedCertificateRevocation !== undefined && object.proposedCertificateRevocation !== null) + ? 
ProposedCertificateRevocation.fromPartial(object.proposedCertificateRevocation) + : undefined; + return message; + }, +}; + +function createBaseQueryAllProposedCertificateRevocationRequest(): QueryAllProposedCertificateRevocationRequest { + return { pagination: undefined }; +} + +export const QueryAllProposedCertificateRevocationRequest = { + encode(message: QueryAllProposedCertificateRevocationRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllProposedCertificateRevocationRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllProposedCertificateRevocationRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllProposedCertificateRevocationRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllProposedCertificateRevocationRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllProposedCertificateRevocationRequest { + const message = createBaseQueryAllProposedCertificateRevocationRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? 
PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllProposedCertificateRevocationResponse(): QueryAllProposedCertificateRevocationResponse { + return { proposedCertificateRevocation: [], pagination: undefined }; +} + +export const QueryAllProposedCertificateRevocationResponse = { + encode(message: QueryAllProposedCertificateRevocationResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.proposedCertificateRevocation) { + ProposedCertificateRevocation.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllProposedCertificateRevocationResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllProposedCertificateRevocationResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.proposedCertificateRevocation.push(ProposedCertificateRevocation.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllProposedCertificateRevocationResponse { + return { + proposedCertificateRevocation: Array.isArray(object?.proposedCertificateRevocation) + ? object.proposedCertificateRevocation.map((e: any) => ProposedCertificateRevocation.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? 
PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllProposedCertificateRevocationResponse): unknown { + const obj: any = {}; + if (message.proposedCertificateRevocation) { + obj.proposedCertificateRevocation = message.proposedCertificateRevocation.map((e) => + e ? ProposedCertificateRevocation.toJSON(e) : undefined + ); + } else { + obj.proposedCertificateRevocation = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllProposedCertificateRevocationResponse { + const message = createBaseQueryAllProposedCertificateRevocationResponse(); + message.proposedCertificateRevocation = + object.proposedCertificateRevocation?.map((e) => ProposedCertificateRevocation.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetRevokedCertificatesRequest(): QueryGetRevokedCertificatesRequest { + return { subject: "", subjectKeyId: "" }; +} + +export const QueryGetRevokedCertificatesRequest = { + encode(message: QueryGetRevokedCertificatesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subject !== "") { + writer.uint32(10).string(message.subject); + } + if (message.subjectKeyId !== "") { + writer.uint32(18).string(message.subjectKeyId); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetRevokedCertificatesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGetRevokedCertificatesRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.subject = reader.string(); + break; + case 2: + message.subjectKeyId = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetRevokedCertificatesRequest { + return { + subject: isSet(object.subject) ? String(object.subject) : "", + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + }; + }, + + toJSON(message: QueryGetRevokedCertificatesRequest): unknown { + const obj: any = {}; + message.subject !== undefined && (obj.subject = message.subject); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetRevokedCertificatesRequest { + const message = createBaseQueryGetRevokedCertificatesRequest(); + message.subject = object.subject ?? ""; + message.subjectKeyId = object.subjectKeyId ?? ""; + return message; + }, +}; + +function createBaseQueryGetRevokedCertificatesResponse(): QueryGetRevokedCertificatesResponse { + return { revokedCertificates: undefined }; +} + +export const QueryGetRevokedCertificatesResponse = { + encode(message: QueryGetRevokedCertificatesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.revokedCertificates !== undefined) { + RevokedCertificates.encode(message.revokedCertificates, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetRevokedCertificatesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGetRevokedCertificatesResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.revokedCertificates = RevokedCertificates.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetRevokedCertificatesResponse { + return { + revokedCertificates: isSet(object.revokedCertificates) + ? RevokedCertificates.fromJSON(object.revokedCertificates) + : undefined, + }; + }, + + toJSON(message: QueryGetRevokedCertificatesResponse): unknown { + const obj: any = {}; + message.revokedCertificates !== undefined && (obj.revokedCertificates = message.revokedCertificates + ? RevokedCertificates.toJSON(message.revokedCertificates) + : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetRevokedCertificatesResponse { + const message = createBaseQueryGetRevokedCertificatesResponse(); + message.revokedCertificates = (object.revokedCertificates !== undefined && object.revokedCertificates !== null) + ? RevokedCertificates.fromPartial(object.revokedCertificates) + : undefined; + return message; + }, +}; + +function createBaseQueryAllRevokedCertificatesRequest(): QueryAllRevokedCertificatesRequest { + return { pagination: undefined }; +} + +export const QueryAllRevokedCertificatesRequest = { + encode(message: QueryAllRevokedCertificatesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllRevokedCertificatesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAllRevokedCertificatesRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllRevokedCertificatesRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllRevokedCertificatesRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllRevokedCertificatesRequest { + const message = createBaseQueryAllRevokedCertificatesRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllRevokedCertificatesResponse(): QueryAllRevokedCertificatesResponse { + return { revokedCertificates: [], pagination: undefined }; +} + +export const QueryAllRevokedCertificatesResponse = { + encode(message: QueryAllRevokedCertificatesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.revokedCertificates) { + RevokedCertificates.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllRevokedCertificatesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAllRevokedCertificatesResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.revokedCertificates.push(RevokedCertificates.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllRevokedCertificatesResponse { + return { + revokedCertificates: Array.isArray(object?.revokedCertificates) + ? object.revokedCertificates.map((e: any) => RevokedCertificates.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllRevokedCertificatesResponse): unknown { + const obj: any = {}; + if (message.revokedCertificates) { + obj.revokedCertificates = message.revokedCertificates.map((e) => e ? RevokedCertificates.toJSON(e) : undefined); + } else { + obj.revokedCertificates = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllRevokedCertificatesResponse { + const message = createBaseQueryAllRevokedCertificatesResponse(); + message.revokedCertificates = object.revokedCertificates?.map((e) => RevokedCertificates.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? 
PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetApprovedRootCertificatesRequest(): QueryGetApprovedRootCertificatesRequest { + return {}; +} + +export const QueryGetApprovedRootCertificatesRequest = { + encode(_: QueryGetApprovedRootCertificatesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetApprovedRootCertificatesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetApprovedRootCertificatesRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): QueryGetApprovedRootCertificatesRequest { + return {}; + }, + + toJSON(_: QueryGetApprovedRootCertificatesRequest): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>( + _: I, + ): QueryGetApprovedRootCertificatesRequest { + const message = createBaseQueryGetApprovedRootCertificatesRequest(); + return message; + }, +}; + +function createBaseQueryGetApprovedRootCertificatesResponse(): QueryGetApprovedRootCertificatesResponse { + return { approvedRootCertificates: undefined }; +} + +export const QueryGetApprovedRootCertificatesResponse = { + encode(message: QueryGetApprovedRootCertificatesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.approvedRootCertificates !== undefined) { + ApprovedRootCertificates.encode(message.approvedRootCertificates, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetApprovedRootCertificatesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGetApprovedRootCertificatesResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.approvedRootCertificates = ApprovedRootCertificates.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetApprovedRootCertificatesResponse { + return { + approvedRootCertificates: isSet(object.approvedRootCertificates) + ? ApprovedRootCertificates.fromJSON(object.approvedRootCertificates) + : undefined, + }; + }, + + toJSON(message: QueryGetApprovedRootCertificatesResponse): unknown { + const obj: any = {}; + message.approvedRootCertificates !== undefined && (obj.approvedRootCertificates = message.approvedRootCertificates + ? ApprovedRootCertificates.toJSON(message.approvedRootCertificates) + : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetApprovedRootCertificatesResponse { + const message = createBaseQueryGetApprovedRootCertificatesResponse(); + message.approvedRootCertificates = + (object.approvedRootCertificates !== undefined && object.approvedRootCertificates !== null) + ? ApprovedRootCertificates.fromPartial(object.approvedRootCertificates) + : undefined; + return message; + }, +}; + +function createBaseQueryGetRevokedRootCertificatesRequest(): QueryGetRevokedRootCertificatesRequest { + return {}; +} + +export const QueryGetRevokedRootCertificatesRequest = { + encode(_: QueryGetRevokedRootCertificatesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetRevokedRootCertificatesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGetRevokedRootCertificatesRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): QueryGetRevokedRootCertificatesRequest { + return {}; + }, + + toJSON(_: QueryGetRevokedRootCertificatesRequest): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>( + _: I, + ): QueryGetRevokedRootCertificatesRequest { + const message = createBaseQueryGetRevokedRootCertificatesRequest(); + return message; + }, +}; + +function createBaseQueryGetRevokedRootCertificatesResponse(): QueryGetRevokedRootCertificatesResponse { + return { revokedRootCertificates: undefined }; +} + +export const QueryGetRevokedRootCertificatesResponse = { + encode(message: QueryGetRevokedRootCertificatesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.revokedRootCertificates !== undefined) { + RevokedRootCertificates.encode(message.revokedRootCertificates, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetRevokedRootCertificatesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetRevokedRootCertificatesResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.revokedRootCertificates = RevokedRootCertificates.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetRevokedRootCertificatesResponse { + return { + revokedRootCertificates: isSet(object.revokedRootCertificates) + ? 
RevokedRootCertificates.fromJSON(object.revokedRootCertificates) + : undefined, + }; + }, + + toJSON(message: QueryGetRevokedRootCertificatesResponse): unknown { + const obj: any = {}; + message.revokedRootCertificates !== undefined && (obj.revokedRootCertificates = message.revokedRootCertificates + ? RevokedRootCertificates.toJSON(message.revokedRootCertificates) + : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetRevokedRootCertificatesResponse { + const message = createBaseQueryGetRevokedRootCertificatesResponse(); + message.revokedRootCertificates = + (object.revokedRootCertificates !== undefined && object.revokedRootCertificates !== null) + ? RevokedRootCertificates.fromPartial(object.revokedRootCertificates) + : undefined; + return message; + }, +}; + +function createBaseQueryGetApprovedCertificatesBySubjectRequest(): QueryGetApprovedCertificatesBySubjectRequest { + return { subject: "" }; +} + +export const QueryGetApprovedCertificatesBySubjectRequest = { + encode(message: QueryGetApprovedCertificatesBySubjectRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subject !== "") { + writer.uint32(10).string(message.subject); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetApprovedCertificatesBySubjectRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetApprovedCertificatesBySubjectRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.subject = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetApprovedCertificatesBySubjectRequest { + return { subject: isSet(object.subject) ? 
String(object.subject) : "" }; + }, + + toJSON(message: QueryGetApprovedCertificatesBySubjectRequest): unknown { + const obj: any = {}; + message.subject !== undefined && (obj.subject = message.subject); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetApprovedCertificatesBySubjectRequest { + const message = createBaseQueryGetApprovedCertificatesBySubjectRequest(); + message.subject = object.subject ?? ""; + return message; + }, +}; + +function createBaseQueryGetApprovedCertificatesBySubjectResponse(): QueryGetApprovedCertificatesBySubjectResponse { + return { approvedCertificatesBySubject: undefined }; +} + +export const QueryGetApprovedCertificatesBySubjectResponse = { + encode(message: QueryGetApprovedCertificatesBySubjectResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.approvedCertificatesBySubject !== undefined) { + ApprovedCertificatesBySubject.encode(message.approvedCertificatesBySubject, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetApprovedCertificatesBySubjectResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetApprovedCertificatesBySubjectResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.approvedCertificatesBySubject = ApprovedCertificatesBySubject.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetApprovedCertificatesBySubjectResponse { + return { + approvedCertificatesBySubject: isSet(object.approvedCertificatesBySubject) + ? 
ApprovedCertificatesBySubject.fromJSON(object.approvedCertificatesBySubject) + : undefined, + }; + }, + + toJSON(message: QueryGetApprovedCertificatesBySubjectResponse): unknown { + const obj: any = {}; + message.approvedCertificatesBySubject !== undefined + && (obj.approvedCertificatesBySubject = message.approvedCertificatesBySubject + ? ApprovedCertificatesBySubject.toJSON(message.approvedCertificatesBySubject) + : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetApprovedCertificatesBySubjectResponse { + const message = createBaseQueryGetApprovedCertificatesBySubjectResponse(); + message.approvedCertificatesBySubject = + (object.approvedCertificatesBySubject !== undefined && object.approvedCertificatesBySubject !== null) + ? ApprovedCertificatesBySubject.fromPartial(object.approvedCertificatesBySubject) + : undefined; + return message; + }, +}; + +function createBaseQueryGetRejectedCertificatesRequest(): QueryGetRejectedCertificatesRequest { + return { subject: "", subjectKeyId: "" }; +} + +export const QueryGetRejectedCertificatesRequest = { + encode(message: QueryGetRejectedCertificatesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subject !== "") { + writer.uint32(10).string(message.subject); + } + if (message.subjectKeyId !== "") { + writer.uint32(18).string(message.subjectKeyId); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetRejectedCertificatesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGetRejectedCertificatesRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.subject = reader.string(); + break; + case 2: + message.subjectKeyId = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetRejectedCertificatesRequest { + return { + subject: isSet(object.subject) ? String(object.subject) : "", + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + }; + }, + + toJSON(message: QueryGetRejectedCertificatesRequest): unknown { + const obj: any = {}; + message.subject !== undefined && (obj.subject = message.subject); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetRejectedCertificatesRequest { + const message = createBaseQueryGetRejectedCertificatesRequest(); + message.subject = object.subject ?? ""; + message.subjectKeyId = object.subjectKeyId ?? ""; + return message; + }, +}; + +function createBaseQueryGetRejectedCertificatesResponse(): QueryGetRejectedCertificatesResponse { + return { rejectedCertificate: undefined }; +} + +export const QueryGetRejectedCertificatesResponse = { + encode(message: QueryGetRejectedCertificatesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.rejectedCertificate !== undefined) { + RejectedCertificate.encode(message.rejectedCertificate, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetRejectedCertificatesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGetRejectedCertificatesResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rejectedCertificate = RejectedCertificate.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetRejectedCertificatesResponse { + return { + rejectedCertificate: isSet(object.rejectedCertificate) + ? RejectedCertificate.fromJSON(object.rejectedCertificate) + : undefined, + }; + }, + + toJSON(message: QueryGetRejectedCertificatesResponse): unknown { + const obj: any = {}; + message.rejectedCertificate !== undefined && (obj.rejectedCertificate = message.rejectedCertificate + ? RejectedCertificate.toJSON(message.rejectedCertificate) + : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetRejectedCertificatesResponse { + const message = createBaseQueryGetRejectedCertificatesResponse(); + message.rejectedCertificate = (object.rejectedCertificate !== undefined && object.rejectedCertificate !== null) + ? RejectedCertificate.fromPartial(object.rejectedCertificate) + : undefined; + return message; + }, +}; + +function createBaseQueryAllRejectedCertificatesRequest(): QueryAllRejectedCertificatesRequest { + return { pagination: undefined }; +} + +export const QueryAllRejectedCertificatesRequest = { + encode(message: QueryAllRejectedCertificatesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllRejectedCertificatesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAllRejectedCertificatesRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllRejectedCertificatesRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllRejectedCertificatesRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllRejectedCertificatesRequest { + const message = createBaseQueryAllRejectedCertificatesRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllRejectedCertificatesResponse(): QueryAllRejectedCertificatesResponse { + return { rejectedCertificate: [], pagination: undefined }; +} + +export const QueryAllRejectedCertificatesResponse = { + encode(message: QueryAllRejectedCertificatesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rejectedCertificate) { + RejectedCertificate.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllRejectedCertificatesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAllRejectedCertificatesResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rejectedCertificate.push(RejectedCertificate.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllRejectedCertificatesResponse { + return { + rejectedCertificate: Array.isArray(object?.rejectedCertificate) + ? object.rejectedCertificate.map((e: any) => RejectedCertificate.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllRejectedCertificatesResponse): unknown { + const obj: any = {}; + if (message.rejectedCertificate) { + obj.rejectedCertificate = message.rejectedCertificate.map((e) => e ? RejectedCertificate.toJSON(e) : undefined); + } else { + obj.rejectedCertificate = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllRejectedCertificatesResponse { + const message = createBaseQueryAllRejectedCertificatesResponse(); + message.rejectedCertificate = object.rejectedCertificate?.map((e) => RejectedCertificate.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? 
PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetPkiRevocationDistributionPointRequest(): QueryGetPkiRevocationDistributionPointRequest { + return { vid: 0, label: "", issuerSubjectKeyID: "" }; +} + +export const QueryGetPkiRevocationDistributionPointRequest = { + encode(message: QueryGetPkiRevocationDistributionPointRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + if (message.label !== "") { + writer.uint32(18).string(message.label); + } + if (message.issuerSubjectKeyID !== "") { + writer.uint32(26).string(message.issuerSubjectKeyID); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetPkiRevocationDistributionPointRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetPkiRevocationDistributionPointRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + case 2: + message.label = reader.string(); + break; + case 3: + message.issuerSubjectKeyID = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetPkiRevocationDistributionPointRequest { + return { + vid: isSet(object.vid) ? Number(object.vid) : 0, + label: isSet(object.label) ? String(object.label) : "", + issuerSubjectKeyID: isSet(object.issuerSubjectKeyID) ? 
String(object.issuerSubjectKeyID) : "", + }; + }, + + toJSON(message: QueryGetPkiRevocationDistributionPointRequest): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.label !== undefined && (obj.label = message.label); + message.issuerSubjectKeyID !== undefined && (obj.issuerSubjectKeyID = message.issuerSubjectKeyID); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetPkiRevocationDistributionPointRequest { + const message = createBaseQueryGetPkiRevocationDistributionPointRequest(); + message.vid = object.vid ?? 0; + message.label = object.label ?? ""; + message.issuerSubjectKeyID = object.issuerSubjectKeyID ?? ""; + return message; + }, +}; + +function createBaseQueryGetPkiRevocationDistributionPointResponse(): QueryGetPkiRevocationDistributionPointResponse { + return { PkiRevocationDistributionPoint: undefined }; +} + +export const QueryGetPkiRevocationDistributionPointResponse = { + encode( + message: QueryGetPkiRevocationDistributionPointResponse, + writer: _m0.Writer = _m0.Writer.create(), + ): _m0.Writer { + if (message.PkiRevocationDistributionPoint !== undefined) { + PkiRevocationDistributionPoint.encode(message.PkiRevocationDistributionPoint, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetPkiRevocationDistributionPointResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGetPkiRevocationDistributionPointResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.PkiRevocationDistributionPoint = PkiRevocationDistributionPoint.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetPkiRevocationDistributionPointResponse { + return { + PkiRevocationDistributionPoint: isSet(object.PkiRevocationDistributionPoint) + ? PkiRevocationDistributionPoint.fromJSON(object.PkiRevocationDistributionPoint) + : undefined, + }; + }, + + toJSON(message: QueryGetPkiRevocationDistributionPointResponse): unknown { + const obj: any = {}; + message.PkiRevocationDistributionPoint !== undefined + && (obj.PkiRevocationDistributionPoint = message.PkiRevocationDistributionPoint + ? PkiRevocationDistributionPoint.toJSON(message.PkiRevocationDistributionPoint) + : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetPkiRevocationDistributionPointResponse { + const message = createBaseQueryGetPkiRevocationDistributionPointResponse(); + message.PkiRevocationDistributionPoint = + (object.PkiRevocationDistributionPoint !== undefined && object.PkiRevocationDistributionPoint !== null) + ? 
PkiRevocationDistributionPoint.fromPartial(object.PkiRevocationDistributionPoint) + : undefined; + return message; + }, +}; + +function createBaseQueryAllPkiRevocationDistributionPointRequest(): QueryAllPkiRevocationDistributionPointRequest { + return { pagination: undefined }; +} + +export const QueryAllPkiRevocationDistributionPointRequest = { + encode(message: QueryAllPkiRevocationDistributionPointRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllPkiRevocationDistributionPointRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllPkiRevocationDistributionPointRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllPkiRevocationDistributionPointRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllPkiRevocationDistributionPointRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllPkiRevocationDistributionPointRequest { + const message = createBaseQueryAllPkiRevocationDistributionPointRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? 
PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllPkiRevocationDistributionPointResponse(): QueryAllPkiRevocationDistributionPointResponse { + return { PkiRevocationDistributionPoint: [], pagination: undefined }; +} + +export const QueryAllPkiRevocationDistributionPointResponse = { + encode( + message: QueryAllPkiRevocationDistributionPointResponse, + writer: _m0.Writer = _m0.Writer.create(), + ): _m0.Writer { + for (const v of message.PkiRevocationDistributionPoint) { + PkiRevocationDistributionPoint.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllPkiRevocationDistributionPointResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllPkiRevocationDistributionPointResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.PkiRevocationDistributionPoint.push(PkiRevocationDistributionPoint.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllPkiRevocationDistributionPointResponse { + return { + PkiRevocationDistributionPoint: Array.isArray(object?.PkiRevocationDistributionPoint) + ? object.PkiRevocationDistributionPoint.map((e: any) => PkiRevocationDistributionPoint.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? 
PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllPkiRevocationDistributionPointResponse): unknown { + const obj: any = {}; + if (message.PkiRevocationDistributionPoint) { + obj.PkiRevocationDistributionPoint = message.PkiRevocationDistributionPoint.map((e) => + e ? PkiRevocationDistributionPoint.toJSON(e) : undefined + ); + } else { + obj.PkiRevocationDistributionPoint = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllPkiRevocationDistributionPointResponse { + const message = createBaseQueryAllPkiRevocationDistributionPointResponse(); + message.PkiRevocationDistributionPoint = + object.PkiRevocationDistributionPoint?.map((e) => PkiRevocationDistributionPoint.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDRequest(): QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDRequest { + return { issuerSubjectKeyID: "" }; +} + +export const QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDRequest = { + encode( + message: QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDRequest, + writer: _m0.Writer = _m0.Writer.create(), + ): _m0.Writer { + if (message.issuerSubjectKeyID !== "") { + writer.uint32(10).string(message.issuerSubjectKeyID); + } + return writer; + }, + + decode( + input: _m0.Reader | Uint8Array, + length?: number, + ): QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.issuerSubjectKeyID = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDRequest { + return { issuerSubjectKeyID: isSet(object.issuerSubjectKeyID) ? String(object.issuerSubjectKeyID) : "" }; + }, + + toJSON(message: QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDRequest): unknown { + const obj: any = {}; + message.issuerSubjectKeyID !== undefined && (obj.issuerSubjectKeyID = message.issuerSubjectKeyID); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDRequest { + const message = createBaseQueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDRequest(); + message.issuerSubjectKeyID = object.issuerSubjectKeyID ?? 
""; + return message; + }, +}; + +function createBaseQueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDResponse(): QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDResponse { + return { pkiRevocationDistributionPointsByIssuerSubjectKeyID: undefined }; +} + +export const QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDResponse = { + encode( + message: QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDResponse, + writer: _m0.Writer = _m0.Writer.create(), + ): _m0.Writer { + if (message.pkiRevocationDistributionPointsByIssuerSubjectKeyID !== undefined) { + PkiRevocationDistributionPointsByIssuerSubjectKeyID.encode( + message.pkiRevocationDistributionPointsByIssuerSubjectKeyID, + writer.uint32(10).fork(), + ).ldelim(); + } + return writer; + }, + + decode( + input: _m0.Reader | Uint8Array, + length?: number, + ): QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pkiRevocationDistributionPointsByIssuerSubjectKeyID = + PkiRevocationDistributionPointsByIssuerSubjectKeyID.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDResponse { + return { + pkiRevocationDistributionPointsByIssuerSubjectKeyID: + isSet(object.pkiRevocationDistributionPointsByIssuerSubjectKeyID) + ? 
PkiRevocationDistributionPointsByIssuerSubjectKeyID.fromJSON( + object.pkiRevocationDistributionPointsByIssuerSubjectKeyID, + ) + : undefined, + }; + }, + + toJSON(message: QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDResponse): unknown { + const obj: any = {}; + message.pkiRevocationDistributionPointsByIssuerSubjectKeyID !== undefined + && (obj.pkiRevocationDistributionPointsByIssuerSubjectKeyID = + message.pkiRevocationDistributionPointsByIssuerSubjectKeyID + ? PkiRevocationDistributionPointsByIssuerSubjectKeyID.toJSON( + message.pkiRevocationDistributionPointsByIssuerSubjectKeyID, + ) + : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDResponse { + const message = createBaseQueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDResponse(); + message.pkiRevocationDistributionPointsByIssuerSubjectKeyID = + (object.pkiRevocationDistributionPointsByIssuerSubjectKeyID !== undefined + && object.pkiRevocationDistributionPointsByIssuerSubjectKeyID !== null) + ? PkiRevocationDistributionPointsByIssuerSubjectKeyID.fromPartial( + object.pkiRevocationDistributionPointsByIssuerSubjectKeyID, + ) + : undefined; + return message; + }, +}; + +function createBaseQueryGetNocRootCertificatesRequest(): QueryGetNocRootCertificatesRequest { + return { vid: 0 }; +} + +export const QueryGetNocRootCertificatesRequest = { + encode(message: QueryGetNocRootCertificatesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetNocRootCertificatesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGetNocRootCertificatesRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetNocRootCertificatesRequest { + return { vid: isSet(object.vid) ? Number(object.vid) : 0 }; + }, + + toJSON(message: QueryGetNocRootCertificatesRequest): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetNocRootCertificatesRequest { + const message = createBaseQueryGetNocRootCertificatesRequest(); + message.vid = object.vid ?? 0; + return message; + }, +}; + +function createBaseQueryGetNocRootCertificatesResponse(): QueryGetNocRootCertificatesResponse { + return { nocRootCertificates: undefined }; +} + +export const QueryGetNocRootCertificatesResponse = { + encode(message: QueryGetNocRootCertificatesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.nocRootCertificates !== undefined) { + NocRootCertificates.encode(message.nocRootCertificates, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetNocRootCertificatesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGetNocRootCertificatesResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.nocRootCertificates = NocRootCertificates.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetNocRootCertificatesResponse { + return { + nocRootCertificates: isSet(object.nocRootCertificates) + ? NocRootCertificates.fromJSON(object.nocRootCertificates) + : undefined, + }; + }, + + toJSON(message: QueryGetNocRootCertificatesResponse): unknown { + const obj: any = {}; + message.nocRootCertificates !== undefined && (obj.nocRootCertificates = message.nocRootCertificates + ? NocRootCertificates.toJSON(message.nocRootCertificates) + : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetNocRootCertificatesResponse { + const message = createBaseQueryGetNocRootCertificatesResponse(); + message.nocRootCertificates = (object.nocRootCertificates !== undefined && object.nocRootCertificates !== null) + ? NocRootCertificates.fromPartial(object.nocRootCertificates) + : undefined; + return message; + }, +}; + +function createBaseQueryAllNocRootCertificatesRequest(): QueryAllNocRootCertificatesRequest { + return { pagination: undefined }; +} + +export const QueryAllNocRootCertificatesRequest = { + encode(message: QueryAllNocRootCertificatesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllNocRootCertificatesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAllNocRootCertificatesRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllNocRootCertificatesRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllNocRootCertificatesRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllNocRootCertificatesRequest { + const message = createBaseQueryAllNocRootCertificatesRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllNocRootCertificatesResponse(): QueryAllNocRootCertificatesResponse { + return { nocRootCertificates: [], pagination: undefined }; +} + +export const QueryAllNocRootCertificatesResponse = { + encode(message: QueryAllNocRootCertificatesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.nocRootCertificates) { + NocRootCertificates.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllNocRootCertificatesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAllNocRootCertificatesResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.nocRootCertificates.push(NocRootCertificates.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllNocRootCertificatesResponse { + return { + nocRootCertificates: Array.isArray(object?.nocRootCertificates) + ? object.nocRootCertificates.map((e: any) => NocRootCertificates.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllNocRootCertificatesResponse): unknown { + const obj: any = {}; + if (message.nocRootCertificates) { + obj.nocRootCertificates = message.nocRootCertificates.map((e) => e ? NocRootCertificates.toJSON(e) : undefined); + } else { + obj.nocRootCertificates = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllNocRootCertificatesResponse { + const message = createBaseQueryAllNocRootCertificatesResponse(); + message.nocRootCertificates = object.nocRootCertificates?.map((e) => NocRootCertificates.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? 
PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetNocIcaCertificatesRequest(): QueryGetNocIcaCertificatesRequest { + return { vid: 0 }; +} + +export const QueryGetNocIcaCertificatesRequest = { + encode(message: QueryGetNocIcaCertificatesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetNocIcaCertificatesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetNocIcaCertificatesRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetNocIcaCertificatesRequest { + return { vid: isSet(object.vid) ? Number(object.vid) : 0 }; + }, + + toJSON(message: QueryGetNocIcaCertificatesRequest): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetNocIcaCertificatesRequest { + const message = createBaseQueryGetNocIcaCertificatesRequest(); + message.vid = object.vid ?? 
0; + return message; + }, +}; + +function createBaseQueryGetNocIcaCertificatesResponse(): QueryGetNocIcaCertificatesResponse { + return { nocIcaCertificates: undefined }; +} + +export const QueryGetNocIcaCertificatesResponse = { + encode(message: QueryGetNocIcaCertificatesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.nocIcaCertificates !== undefined) { + NocIcaCertificates.encode(message.nocIcaCertificates, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetNocIcaCertificatesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetNocIcaCertificatesResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.nocIcaCertificates = NocIcaCertificates.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetNocIcaCertificatesResponse { + return { + nocIcaCertificates: isSet(object.nocIcaCertificates) + ? NocIcaCertificates.fromJSON(object.nocIcaCertificates) + : undefined, + }; + }, + + toJSON(message: QueryGetNocIcaCertificatesResponse): unknown { + const obj: any = {}; + message.nocIcaCertificates !== undefined && (obj.nocIcaCertificates = message.nocIcaCertificates + ? NocIcaCertificates.toJSON(message.nocIcaCertificates) + : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetNocIcaCertificatesResponse { + const message = createBaseQueryGetNocIcaCertificatesResponse(); + message.nocIcaCertificates = (object.nocIcaCertificates !== undefined && object.nocIcaCertificates !== null) + ? 
NocIcaCertificates.fromPartial(object.nocIcaCertificates) + : undefined; + return message; + }, +}; + +function createBaseQueryAllNocIcaCertificatesRequest(): QueryAllNocIcaCertificatesRequest { + return { pagination: undefined }; +} + +export const QueryAllNocIcaCertificatesRequest = { + encode(message: QueryAllNocIcaCertificatesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllNocIcaCertificatesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllNocIcaCertificatesRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllNocIcaCertificatesRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllNocIcaCertificatesRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllNocIcaCertificatesRequest { + const message = createBaseQueryAllNocIcaCertificatesRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? 
PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllNocIcaCertificatesResponse(): QueryAllNocIcaCertificatesResponse { + return { nocIcaCertificates: [], pagination: undefined }; +} + +export const QueryAllNocIcaCertificatesResponse = { + encode(message: QueryAllNocIcaCertificatesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.nocIcaCertificates) { + NocIcaCertificates.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllNocIcaCertificatesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllNocIcaCertificatesResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.nocIcaCertificates.push(NocIcaCertificates.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllNocIcaCertificatesResponse { + return { + nocIcaCertificates: Array.isArray(object?.nocIcaCertificates) + ? object.nocIcaCertificates.map((e: any) => NocIcaCertificates.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllNocIcaCertificatesResponse): unknown { + const obj: any = {}; + if (message.nocIcaCertificates) { + obj.nocIcaCertificates = message.nocIcaCertificates.map((e) => e ? 
NocIcaCertificates.toJSON(e) : undefined); + } else { + obj.nocIcaCertificates = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllNocIcaCertificatesResponse { + const message = createBaseQueryAllNocIcaCertificatesResponse(); + message.nocIcaCertificates = object.nocIcaCertificates?.map((e) => NocIcaCertificates.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetRevokedNocRootCertificatesRequest(): QueryGetRevokedNocRootCertificatesRequest { + return { subject: "", subjectKeyId: "" }; +} + +export const QueryGetRevokedNocRootCertificatesRequest = { + encode(message: QueryGetRevokedNocRootCertificatesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subject !== "") { + writer.uint32(10).string(message.subject); + } + if (message.subjectKeyId !== "") { + writer.uint32(18).string(message.subjectKeyId); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetRevokedNocRootCertificatesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetRevokedNocRootCertificatesRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.subject = reader.string(); + break; + case 2: + message.subjectKeyId = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetRevokedNocRootCertificatesRequest { + return { + subject: isSet(object.subject) ? String(object.subject) : "", + subjectKeyId: isSet(object.subjectKeyId) ? 
String(object.subjectKeyId) : "", + }; + }, + + toJSON(message: QueryGetRevokedNocRootCertificatesRequest): unknown { + const obj: any = {}; + message.subject !== undefined && (obj.subject = message.subject); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetRevokedNocRootCertificatesRequest { + const message = createBaseQueryGetRevokedNocRootCertificatesRequest(); + message.subject = object.subject ?? ""; + message.subjectKeyId = object.subjectKeyId ?? ""; + return message; + }, +}; + +function createBaseQueryGetRevokedNocRootCertificatesResponse(): QueryGetRevokedNocRootCertificatesResponse { + return { revokedNocRootCertificates: undefined }; +} + +export const QueryGetRevokedNocRootCertificatesResponse = { + encode(message: QueryGetRevokedNocRootCertificatesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.revokedNocRootCertificates !== undefined) { + RevokedNocRootCertificates.encode(message.revokedNocRootCertificates, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetRevokedNocRootCertificatesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetRevokedNocRootCertificatesResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.revokedNocRootCertificates = RevokedNocRootCertificates.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetRevokedNocRootCertificatesResponse { + return { + revokedNocRootCertificates: isSet(object.revokedNocRootCertificates) + ? 
RevokedNocRootCertificates.fromJSON(object.revokedNocRootCertificates) + : undefined, + }; + }, + + toJSON(message: QueryGetRevokedNocRootCertificatesResponse): unknown { + const obj: any = {}; + message.revokedNocRootCertificates !== undefined + && (obj.revokedNocRootCertificates = message.revokedNocRootCertificates + ? RevokedNocRootCertificates.toJSON(message.revokedNocRootCertificates) + : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetRevokedNocRootCertificatesResponse { + const message = createBaseQueryGetRevokedNocRootCertificatesResponse(); + message.revokedNocRootCertificates = + (object.revokedNocRootCertificates !== undefined && object.revokedNocRootCertificates !== null) + ? RevokedNocRootCertificates.fromPartial(object.revokedNocRootCertificates) + : undefined; + return message; + }, +}; + +function createBaseQueryAllRevokedNocRootCertificatesRequest(): QueryAllRevokedNocRootCertificatesRequest { + return { pagination: undefined }; +} + +export const QueryAllRevokedNocRootCertificatesRequest = { + encode(message: QueryAllRevokedNocRootCertificatesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllRevokedNocRootCertificatesRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAllRevokedNocRootCertificatesRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllRevokedNocRootCertificatesRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllRevokedNocRootCertificatesRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllRevokedNocRootCertificatesRequest { + const message = createBaseQueryAllRevokedNocRootCertificatesRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllRevokedNocRootCertificatesResponse(): QueryAllRevokedNocRootCertificatesResponse { + return { revokedNocRootCertificates: [], pagination: undefined }; +} + +export const QueryAllRevokedNocRootCertificatesResponse = { + encode(message: QueryAllRevokedNocRootCertificatesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.revokedNocRootCertificates) { + RevokedNocRootCertificates.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllRevokedNocRootCertificatesResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAllRevokedNocRootCertificatesResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.revokedNocRootCertificates.push(RevokedNocRootCertificates.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllRevokedNocRootCertificatesResponse { + return { + revokedNocRootCertificates: Array.isArray(object?.revokedNocRootCertificates) + ? object.revokedNocRootCertificates.map((e: any) => RevokedNocRootCertificates.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllRevokedNocRootCertificatesResponse): unknown { + const obj: any = {}; + if (message.revokedNocRootCertificates) { + obj.revokedNocRootCertificates = message.revokedNocRootCertificates.map((e) => + e ? RevokedNocRootCertificates.toJSON(e) : undefined + ); + } else { + obj.revokedNocRootCertificates = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllRevokedNocRootCertificatesResponse { + const message = createBaseQueryAllRevokedNocRootCertificatesResponse(); + message.revokedNocRootCertificates = + object.revokedNocRootCertificates?.map((e) => RevokedNocRootCertificates.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? 
PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetNocRootCertificatesByVidAndSkidRequest(): QueryGetNocRootCertificatesByVidAndSkidRequest { + return { vid: 0, subjectKeyId: "" }; +} + +export const QueryGetNocRootCertificatesByVidAndSkidRequest = { + encode( + message: QueryGetNocRootCertificatesByVidAndSkidRequest, + writer: _m0.Writer = _m0.Writer.create(), + ): _m0.Writer { + if (message.vid !== 0) { + writer.uint32(8).int32(message.vid); + } + if (message.subjectKeyId !== "") { + writer.uint32(18).string(message.subjectKeyId); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetNocRootCertificatesByVidAndSkidRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetNocRootCertificatesByVidAndSkidRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vid = reader.int32(); + break; + case 2: + message.subjectKeyId = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetNocRootCertificatesByVidAndSkidRequest { + return { + vid: isSet(object.vid) ? Number(object.vid) : 0, + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + }; + }, + + toJSON(message: QueryGetNocRootCertificatesByVidAndSkidRequest): unknown { + const obj: any = {}; + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetNocRootCertificatesByVidAndSkidRequest { + const message = createBaseQueryGetNocRootCertificatesByVidAndSkidRequest(); + message.vid = object.vid ?? 0; + message.subjectKeyId = object.subjectKeyId ?? 
""; + return message; + }, +}; + +function createBaseQueryGetNocRootCertificatesByVidAndSkidResponse(): QueryGetNocRootCertificatesByVidAndSkidResponse { + return { nocRootCertificatesByVidAndSkid: undefined }; +} + +export const QueryGetNocRootCertificatesByVidAndSkidResponse = { + encode( + message: QueryGetNocRootCertificatesByVidAndSkidResponse, + writer: _m0.Writer = _m0.Writer.create(), + ): _m0.Writer { + if (message.nocRootCertificatesByVidAndSkid !== undefined) { + NocRootCertificatesByVidAndSkid.encode(message.nocRootCertificatesByVidAndSkid, writer.uint32(10).fork()) + .ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetNocRootCertificatesByVidAndSkidResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetNocRootCertificatesByVidAndSkidResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.nocRootCertificatesByVidAndSkid = NocRootCertificatesByVidAndSkid.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetNocRootCertificatesByVidAndSkidResponse { + return { + nocRootCertificatesByVidAndSkid: isSet(object.nocRootCertificatesByVidAndSkid) + ? NocRootCertificatesByVidAndSkid.fromJSON(object.nocRootCertificatesByVidAndSkid) + : undefined, + }; + }, + + toJSON(message: QueryGetNocRootCertificatesByVidAndSkidResponse): unknown { + const obj: any = {}; + message.nocRootCertificatesByVidAndSkid !== undefined + && (obj.nocRootCertificatesByVidAndSkid = message.nocRootCertificatesByVidAndSkid + ? 
NocRootCertificatesByVidAndSkid.toJSON(message.nocRootCertificatesByVidAndSkid) + : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetNocRootCertificatesByVidAndSkidResponse { + const message = createBaseQueryGetNocRootCertificatesByVidAndSkidResponse(); + message.nocRootCertificatesByVidAndSkid = + (object.nocRootCertificatesByVidAndSkid !== undefined && object.nocRootCertificatesByVidAndSkid !== null) + ? NocRootCertificatesByVidAndSkid.fromPartial(object.nocRootCertificatesByVidAndSkid) + : undefined; + return message; + }, +}; + +/** Query defines the gRPC querier service. */ +export interface Query { + /** Queries a ApprovedCertificates by index. */ + ApprovedCertificates(request: QueryGetApprovedCertificatesRequest): Promise; + /** Queries a list of ApprovedCertificates items. */ + ApprovedCertificatesAll(request: QueryAllApprovedCertificatesRequest): Promise; + /** Queries a ProposedCertificate by index. */ + ProposedCertificate(request: QueryGetProposedCertificateRequest): Promise; + /** Queries a list of ProposedCertificate items. */ + ProposedCertificateAll(request: QueryAllProposedCertificateRequest): Promise; + /** Queries a ChildCertificates by index. */ + ChildCertificates(request: QueryGetChildCertificatesRequest): Promise; + /** Queries a ProposedCertificateRevocation by index. */ + ProposedCertificateRevocation( + request: QueryGetProposedCertificateRevocationRequest, + ): Promise; + /** Queries a list of ProposedCertificateRevocation items. */ + ProposedCertificateRevocationAll( + request: QueryAllProposedCertificateRevocationRequest, + ): Promise; + /** Queries a RevokedCertificates by index. */ + RevokedCertificates(request: QueryGetRevokedCertificatesRequest): Promise; + /** Queries a list of RevokedCertificates items. */ + RevokedCertificatesAll(request: QueryAllRevokedCertificatesRequest): Promise; + /** Queries a ApprovedRootCertificates by index. 
*/ + ApprovedRootCertificates( + request: QueryGetApprovedRootCertificatesRequest, + ): Promise; + /** Queries a RevokedRootCertificates by index. */ + RevokedRootCertificates( + request: QueryGetRevokedRootCertificatesRequest, + ): Promise; + /** Queries a ApprovedCertificatesBySubject by index. */ + ApprovedCertificatesBySubject( + request: QueryGetApprovedCertificatesBySubjectRequest, + ): Promise; + /** Queries a RejectedCertificate by index. */ + RejectedCertificate(request: QueryGetRejectedCertificatesRequest): Promise; + /** Queries a list of RejectedCertificate items. */ + RejectedCertificateAll(request: QueryAllRejectedCertificatesRequest): Promise; + /** Queries a PkiRevocationDistributionPoint by index. */ + PkiRevocationDistributionPoint( + request: QueryGetPkiRevocationDistributionPointRequest, + ): Promise; + /** Queries a list of PkiRevocationDistributionPoint items. */ + PkiRevocationDistributionPointAll( + request: QueryAllPkiRevocationDistributionPointRequest, + ): Promise; + /** Queries a PkiRevocationDistributionPointsByIssuerSubjectKeyID by index. */ + PkiRevocationDistributionPointsByIssuerSubjectKeyID( + request: QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDRequest, + ): Promise; + /** Queries a NocRootCertificates by index. */ + NocRootCertificates(request: QueryGetNocRootCertificatesRequest): Promise; + /** Queries a NocRootCertificatesByVidAndSkid by index. */ + NocRootCertificatesByVidAndSkid( + request: QueryGetNocRootCertificatesByVidAndSkidRequest, + ): Promise; + /** Queries a list of NocRootCertificates items. */ + NocRootCertificatesAll(request: QueryAllNocRootCertificatesRequest): Promise; + /** Queries a NocIcaCertificates by index. */ + NocIcaCertificates(request: QueryGetNocIcaCertificatesRequest): Promise; + /** Queries a list of NocIcaCertificates items. */ + NocIcaCertificatesAll(request: QueryAllNocIcaCertificatesRequest): Promise; + /** Queries a RevokedNocRootCertificates by index. 
*/ + RevokedNocRootCertificates( + request: QueryGetRevokedNocRootCertificatesRequest, + ): Promise; + /** Queries a list of RevokedNocRootCertificates items. */ + RevokedNocRootCertificatesAll( + request: QueryAllRevokedNocRootCertificatesRequest, + ): Promise; +} + +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.ApprovedCertificates = this.ApprovedCertificates.bind(this); + this.ApprovedCertificatesAll = this.ApprovedCertificatesAll.bind(this); + this.ProposedCertificate = this.ProposedCertificate.bind(this); + this.ProposedCertificateAll = this.ProposedCertificateAll.bind(this); + this.ChildCertificates = this.ChildCertificates.bind(this); + this.ProposedCertificateRevocation = this.ProposedCertificateRevocation.bind(this); + this.ProposedCertificateRevocationAll = this.ProposedCertificateRevocationAll.bind(this); + this.RevokedCertificates = this.RevokedCertificates.bind(this); + this.RevokedCertificatesAll = this.RevokedCertificatesAll.bind(this); + this.ApprovedRootCertificates = this.ApprovedRootCertificates.bind(this); + this.RevokedRootCertificates = this.RevokedRootCertificates.bind(this); + this.ApprovedCertificatesBySubject = this.ApprovedCertificatesBySubject.bind(this); + this.RejectedCertificate = this.RejectedCertificate.bind(this); + this.RejectedCertificateAll = this.RejectedCertificateAll.bind(this); + this.PkiRevocationDistributionPoint = this.PkiRevocationDistributionPoint.bind(this); + this.PkiRevocationDistributionPointAll = this.PkiRevocationDistributionPointAll.bind(this); + this.PkiRevocationDistributionPointsByIssuerSubjectKeyID = this.PkiRevocationDistributionPointsByIssuerSubjectKeyID + .bind(this); + this.NocRootCertificates = this.NocRootCertificates.bind(this); + this.NocRootCertificatesByVidAndSkid = this.NocRootCertificatesByVidAndSkid.bind(this); + this.NocRootCertificatesAll = this.NocRootCertificatesAll.bind(this); + this.NocIcaCertificates = 
this.NocIcaCertificates.bind(this); + this.NocIcaCertificatesAll = this.NocIcaCertificatesAll.bind(this); + this.RevokedNocRootCertificates = this.RevokedNocRootCertificates.bind(this); + this.RevokedNocRootCertificatesAll = this.RevokedNocRootCertificatesAll.bind(this); + } + ApprovedCertificates(request: QueryGetApprovedCertificatesRequest): Promise { + const data = QueryGetApprovedCertificatesRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Query", + "ApprovedCertificates", + data, + ); + return promise.then((data) => QueryGetApprovedCertificatesResponse.decode(new _m0.Reader(data))); + } + + ApprovedCertificatesAll(request: QueryAllApprovedCertificatesRequest): Promise { + const data = QueryAllApprovedCertificatesRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Query", + "ApprovedCertificatesAll", + data, + ); + return promise.then((data) => QueryAllApprovedCertificatesResponse.decode(new _m0.Reader(data))); + } + + ProposedCertificate(request: QueryGetProposedCertificateRequest): Promise { + const data = QueryGetProposedCertificateRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Query", + "ProposedCertificate", + data, + ); + return promise.then((data) => QueryGetProposedCertificateResponse.decode(new _m0.Reader(data))); + } + + ProposedCertificateAll(request: QueryAllProposedCertificateRequest): Promise { + const data = QueryAllProposedCertificateRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Query", + "ProposedCertificateAll", + data, + ); + return promise.then((data) => QueryAllProposedCertificateResponse.decode(new _m0.Reader(data))); + } + + ChildCertificates(request: QueryGetChildCertificatesRequest): Promise { + const data = 
QueryGetChildCertificatesRequest.encode(request).finish(); + const promise = this.rpc.request("zigbeealliance.distributedcomplianceledger.pki.Query", "ChildCertificates", data); + return promise.then((data) => QueryGetChildCertificatesResponse.decode(new _m0.Reader(data))); + } + + ProposedCertificateRevocation( + request: QueryGetProposedCertificateRevocationRequest, + ): Promise { + const data = QueryGetProposedCertificateRevocationRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Query", + "ProposedCertificateRevocation", + data, + ); + return promise.then((data) => QueryGetProposedCertificateRevocationResponse.decode(new _m0.Reader(data))); + } + + ProposedCertificateRevocationAll( + request: QueryAllProposedCertificateRevocationRequest, + ): Promise { + const data = QueryAllProposedCertificateRevocationRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Query", + "ProposedCertificateRevocationAll", + data, + ); + return promise.then((data) => QueryAllProposedCertificateRevocationResponse.decode(new _m0.Reader(data))); + } + + RevokedCertificates(request: QueryGetRevokedCertificatesRequest): Promise { + const data = QueryGetRevokedCertificatesRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Query", + "RevokedCertificates", + data, + ); + return promise.then((data) => QueryGetRevokedCertificatesResponse.decode(new _m0.Reader(data))); + } + + RevokedCertificatesAll(request: QueryAllRevokedCertificatesRequest): Promise { + const data = QueryAllRevokedCertificatesRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Query", + "RevokedCertificatesAll", + data, + ); + return promise.then((data) => QueryAllRevokedCertificatesResponse.decode(new _m0.Reader(data))); + } + + ApprovedRootCertificates( 
+ request: QueryGetApprovedRootCertificatesRequest, + ): Promise { + const data = QueryGetApprovedRootCertificatesRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Query", + "ApprovedRootCertificates", + data, + ); + return promise.then((data) => QueryGetApprovedRootCertificatesResponse.decode(new _m0.Reader(data))); + } + + RevokedRootCertificates( + request: QueryGetRevokedRootCertificatesRequest, + ): Promise { + const data = QueryGetRevokedRootCertificatesRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Query", + "RevokedRootCertificates", + data, + ); + return promise.then((data) => QueryGetRevokedRootCertificatesResponse.decode(new _m0.Reader(data))); + } + + ApprovedCertificatesBySubject( + request: QueryGetApprovedCertificatesBySubjectRequest, + ): Promise { + const data = QueryGetApprovedCertificatesBySubjectRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Query", + "ApprovedCertificatesBySubject", + data, + ); + return promise.then((data) => QueryGetApprovedCertificatesBySubjectResponse.decode(new _m0.Reader(data))); + } + + RejectedCertificate(request: QueryGetRejectedCertificatesRequest): Promise { + const data = QueryGetRejectedCertificatesRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Query", + "RejectedCertificate", + data, + ); + return promise.then((data) => QueryGetRejectedCertificatesResponse.decode(new _m0.Reader(data))); + } + + RejectedCertificateAll(request: QueryAllRejectedCertificatesRequest): Promise { + const data = QueryAllRejectedCertificatesRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Query", + "RejectedCertificateAll", + data, + ); + return promise.then((data) => 
QueryAllRejectedCertificatesResponse.decode(new _m0.Reader(data))); + } + + PkiRevocationDistributionPoint( + request: QueryGetPkiRevocationDistributionPointRequest, + ): Promise { + const data = QueryGetPkiRevocationDistributionPointRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Query", + "PkiRevocationDistributionPoint", + data, + ); + return promise.then((data) => QueryGetPkiRevocationDistributionPointResponse.decode(new _m0.Reader(data))); + } + + PkiRevocationDistributionPointAll( + request: QueryAllPkiRevocationDistributionPointRequest, + ): Promise { + const data = QueryAllPkiRevocationDistributionPointRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Query", + "PkiRevocationDistributionPointAll", + data, + ); + return promise.then((data) => QueryAllPkiRevocationDistributionPointResponse.decode(new _m0.Reader(data))); + } + + PkiRevocationDistributionPointsByIssuerSubjectKeyID( + request: QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDRequest, + ): Promise { + const data = QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Query", + "PkiRevocationDistributionPointsByIssuerSubjectKeyID", + data, + ); + return promise.then((data) => + QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDResponse.decode(new _m0.Reader(data)) + ); + } + + NocRootCertificates(request: QueryGetNocRootCertificatesRequest): Promise { + const data = QueryGetNocRootCertificatesRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Query", + "NocRootCertificates", + data, + ); + return promise.then((data) => QueryGetNocRootCertificatesResponse.decode(new _m0.Reader(data))); + } + + NocRootCertificatesByVidAndSkid( + request: 
QueryGetNocRootCertificatesByVidAndSkidRequest, + ): Promise { + const data = QueryGetNocRootCertificatesByVidAndSkidRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Query", + "NocRootCertificatesByVidAndSkid", + data, + ); + return promise.then((data) => QueryGetNocRootCertificatesByVidAndSkidResponse.decode(new _m0.Reader(data))); + } + + NocRootCertificatesAll(request: QueryAllNocRootCertificatesRequest): Promise { + const data = QueryAllNocRootCertificatesRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Query", + "NocRootCertificatesAll", + data, + ); + return promise.then((data) => QueryAllNocRootCertificatesResponse.decode(new _m0.Reader(data))); + } + + NocIcaCertificates(request: QueryGetNocIcaCertificatesRequest): Promise { + const data = QueryGetNocIcaCertificatesRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Query", + "NocIcaCertificates", + data, + ); + return promise.then((data) => QueryGetNocIcaCertificatesResponse.decode(new _m0.Reader(data))); + } + + NocIcaCertificatesAll(request: QueryAllNocIcaCertificatesRequest): Promise { + const data = QueryAllNocIcaCertificatesRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Query", + "NocIcaCertificatesAll", + data, + ); + return promise.then((data) => QueryAllNocIcaCertificatesResponse.decode(new _m0.Reader(data))); + } + + RevokedNocRootCertificates( + request: QueryGetRevokedNocRootCertificatesRequest, + ): Promise { + const data = QueryGetRevokedNocRootCertificatesRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Query", + "RevokedNocRootCertificates", + data, + ); + return promise.then((data) => QueryGetRevokedNocRootCertificatesResponse.decode(new 
_m0.Reader(data))); + } + + RevokedNocRootCertificatesAll( + request: QueryAllRevokedNocRootCertificatesRequest, + ): Promise { + const data = QueryAllRevokedNocRootCertificatesRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Query", + "RevokedNocRootCertificatesAll", + data, + ); + return promise.then((data) => QueryAllRevokedNocRootCertificatesResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/rejected_certificate.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/rejected_certificate.ts new file mode 100644 index 000000000..b16519c33 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/rejected_certificate.ts @@ -0,0 +1,107 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Certificate } from "./certificate"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; + +export interface RejectedCertificate { + subject: string; + subjectKeyId: string; + certs: Certificate[]; + schemaVersion: number; +} + +function createBaseRejectedCertificate(): RejectedCertificate { + return { 
subject: "", subjectKeyId: "", certs: [], schemaVersion: 0 }; +} + +export const RejectedCertificate = { + encode(message: RejectedCertificate, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subject !== "") { + writer.uint32(10).string(message.subject); + } + if (message.subjectKeyId !== "") { + writer.uint32(18).string(message.subjectKeyId); + } + for (const v of message.certs) { + Certificate.encode(v!, writer.uint32(26).fork()).ldelim(); + } + if (message.schemaVersion !== 0) { + writer.uint32(32).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RejectedCertificate { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRejectedCertificate(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.subject = reader.string(); + break; + case 2: + message.subjectKeyId = reader.string(); + break; + case 3: + message.certs.push(Certificate.decode(reader, reader.uint32())); + break; + case 4: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): RejectedCertificate { + return { + subject: isSet(object.subject) ? String(object.subject) : "", + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + certs: Array.isArray(object?.certs) ? object.certs.map((e: any) => Certificate.fromJSON(e)) : [], + schemaVersion: isSet(object.schemaVersion) ? Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: RejectedCertificate): unknown { + const obj: any = {}; + message.subject !== undefined && (obj.subject = message.subject); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + if (message.certs) { + obj.certs = message.certs.map((e) => e ? 
Certificate.toJSON(e) : undefined); + } else { + obj.certs = []; + } + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): RejectedCertificate { + const message = createBaseRejectedCertificate(); + message.subject = object.subject ?? ""; + message.subjectKeyId = object.subjectKeyId ?? ""; + message.certs = object.certs?.map((e) => Certificate.fromPartial(e)) || []; + message.schemaVersion = object.schemaVersion ?? 0; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/revoked_certificates.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/revoked_certificates.ts new file mode 100644 index 000000000..78f9e77e5 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/revoked_certificates.ts @@ -0,0 +1,107 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Certificate } from "./certificate"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; + +export interface RevokedCertificates { + subject: string; + subjectKeyId: string; + certs: Certificate[]; + schemaVersion: number; +} + +function createBaseRevokedCertificates(): RevokedCertificates { + return { subject: "", 
subjectKeyId: "", certs: [], schemaVersion: 0 }; +} + +export const RevokedCertificates = { + encode(message: RevokedCertificates, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subject !== "") { + writer.uint32(10).string(message.subject); + } + if (message.subjectKeyId !== "") { + writer.uint32(18).string(message.subjectKeyId); + } + for (const v of message.certs) { + Certificate.encode(v!, writer.uint32(26).fork()).ldelim(); + } + if (message.schemaVersion !== 0) { + writer.uint32(32).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RevokedCertificates { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRevokedCertificates(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.subject = reader.string(); + break; + case 2: + message.subjectKeyId = reader.string(); + break; + case 3: + message.certs.push(Certificate.decode(reader, reader.uint32())); + break; + case 4: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): RevokedCertificates { + return { + subject: isSet(object.subject) ? String(object.subject) : "", + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + certs: Array.isArray(object?.certs) ? object.certs.map((e: any) => Certificate.fromJSON(e)) : [], + schemaVersion: isSet(object.schemaVersion) ? Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: RevokedCertificates): unknown { + const obj: any = {}; + message.subject !== undefined && (obj.subject = message.subject); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + if (message.certs) { + obj.certs = message.certs.map((e) => e ? 
Certificate.toJSON(e) : undefined); + } else { + obj.certs = []; + } + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): RevokedCertificates { + const message = createBaseRevokedCertificates(); + message.subject = object.subject ?? ""; + message.subjectKeyId = object.subjectKeyId ?? ""; + message.certs = object.certs?.map((e) => Certificate.fromPartial(e)) || []; + message.schemaVersion = object.schemaVersion ?? 0; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/revoked_noc_root_certificates.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/revoked_noc_root_certificates.ts new file mode 100644 index 000000000..ddfc25c72 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/revoked_noc_root_certificates.ts @@ -0,0 +1,97 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Certificate } from "./certificate"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; + +export interface RevokedNocRootCertificates { + subject: string; + subjectKeyId: string; + certs: Certificate[]; +} + +function createBaseRevokedNocRootCertificates(): RevokedNocRootCertificates { + 
return { subject: "", subjectKeyId: "", certs: [] }; +} + +export const RevokedNocRootCertificates = { + encode(message: RevokedNocRootCertificates, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.subject !== "") { + writer.uint32(10).string(message.subject); + } + if (message.subjectKeyId !== "") { + writer.uint32(18).string(message.subjectKeyId); + } + for (const v of message.certs) { + Certificate.encode(v!, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RevokedNocRootCertificates { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRevokedNocRootCertificates(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.subject = reader.string(); + break; + case 2: + message.subjectKeyId = reader.string(); + break; + case 3: + message.certs.push(Certificate.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): RevokedNocRootCertificates { + return { + subject: isSet(object.subject) ? String(object.subject) : "", + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + certs: Array.isArray(object?.certs) ? object.certs.map((e: any) => Certificate.fromJSON(e)) : [], + }; + }, + + toJSON(message: RevokedNocRootCertificates): unknown { + const obj: any = {}; + message.subject !== undefined && (obj.subject = message.subject); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + if (message.certs) { + obj.certs = message.certs.map((e) => e ? 
Certificate.toJSON(e) : undefined); + } else { + obj.certs = []; + } + return obj; + }, + + fromPartial, I>>(object: I): RevokedNocRootCertificates { + const message = createBaseRevokedNocRootCertificates(); + message.subject = object.subject ?? ""; + message.subjectKeyId = object.subjectKeyId ?? ""; + message.certs = object.certs?.map((e) => Certificate.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/revoked_root_certificates.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/revoked_root_certificates.ts new file mode 100644 index 000000000..5f0cb9051 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/revoked_root_certificates.ts @@ -0,0 +1,73 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { CertificateIdentifier } from "./certificate_identifier"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; + +export interface RevokedRootCertificates { + certs: CertificateIdentifier[]; +} + +function createBaseRevokedRootCertificates(): RevokedRootCertificates { + return { certs: [] }; +} + +export const RevokedRootCertificates = { + encode(message: RevokedRootCertificates, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for 
(const v of message.certs) { + CertificateIdentifier.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RevokedRootCertificates { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRevokedRootCertificates(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.certs.push(CertificateIdentifier.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): RevokedRootCertificates { + return { + certs: Array.isArray(object?.certs) ? object.certs.map((e: any) => CertificateIdentifier.fromJSON(e)) : [], + }; + }, + + toJSON(message: RevokedRootCertificates): unknown { + const obj: any = {}; + if (message.certs) { + obj.certs = message.certs.map((e) => e ? CertificateIdentifier.toJSON(e) : undefined); + } else { + obj.certs = []; + } + return obj; + }, + + fromPartial, I>>(object: I): RevokedRootCertificates { + const message = createBaseRevokedRootCertificates(); + message.certs = object.certs?.map((e) => CertificateIdentifier.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/tx.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/tx.ts new file mode 100644 index 000000000..40749344d --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/tx.ts @@ -0,0 +1,2763 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; + +export interface MsgProposeAddX509RootCert { + signer: string; + cert: string; + info: string; + time: number; + vid: number; + certSchemaVersion: number; + schemaVersion: number; +} + +export interface MsgProposeAddX509RootCertResponse { +} + +export interface MsgApproveAddX509RootCert { + signer: string; + subject: string; + subjectKeyId: string; + info: string; + time: number; +} + +export interface MsgApproveAddX509RootCertResponse { +} + +export interface MsgAddX509Cert { + signer: string; + cert: string; + info: string; + time: number; + certSchemaVersion: number; + schemaVersion: number; +} + +export interface MsgAddX509CertResponse { +} + +export interface MsgProposeRevokeX509RootCert { + signer: string; + subject: string; + subjectKeyId: string; + info: string; + time: number; + serialNumber: string; + revokeChild: boolean; + schemaVersion: number; +} + +export interface MsgProposeRevokeX509RootCertResponse { +} + +export interface MsgApproveRevokeX509RootCert { + signer: string; + subject: string; + subjectKeyId: string; + info: string; + time: number; + serialNumber: string; +} + +export interface MsgApproveRevokeX509RootCertResponse { +} + +export interface MsgRevokeX509Cert { + signer: string; + subject: string; + subjectKeyId: string; + info: string; + time: number; + 
serialNumber: string; + revokeChild: boolean; + schemaVersion: number; +} + +export interface MsgRevokeX509CertResponse { +} + +export interface MsgRejectAddX509RootCert { + signer: string; + subject: string; + subjectKeyId: string; + info: string; + time: number; + schemaVersion: number; +} + +export interface MsgRejectAddX509RootCertResponse { +} + +export interface MsgAddPkiRevocationDistributionPoint { + signer: string; + vid: number; + pid: number; + isPAA: boolean; + label: string; + crlSignerCertificate: string; + issuerSubjectKeyID: string; + dataURL: string; + dataFileSize: number; + dataDigest: string; + dataDigestType: number; + revocationType: number; + schemaVersion: number; + crlSignerDelegator: string; +} + +export interface MsgAddPkiRevocationDistributionPointResponse { +} + +export interface MsgUpdatePkiRevocationDistributionPoint { + signer: string; + vid: number; + label: string; + crlSignerCertificate: string; + issuerSubjectKeyID: string; + dataURL: string; + dataFileSize: number; + dataDigest: string; + dataDigestType: number; + schemaVersion: number; + crlSignerDelegator: string; +} + +export interface MsgUpdatePkiRevocationDistributionPointResponse { +} + +export interface MsgDeletePkiRevocationDistributionPoint { + signer: string; + vid: number; + label: string; + issuerSubjectKeyID: string; +} + +export interface MsgDeletePkiRevocationDistributionPointResponse { +} + +export interface MsgAssignVid { + signer: string; + subject: string; + subjectKeyId: string; + vid: number; +} + +export interface MsgAssignVidResponse { +} + +export interface MsgAddNocX509RootCert { + signer: string; + cert: string; + certSchemaVersion: number; + schemaVersion: number; +} + +export interface MsgAddNocX509RootCertResponse { +} + +export interface MsgRemoveX509Cert { + signer: string; + subject: string; + subjectKeyId: string; + serialNumber: string; +} + +export interface MsgRemoveX509CertResponse { +} + +export interface MsgAddNocX509IcaCert { + signer: 
string; + cert: string; + certSchemaVersion: number; + schemaVersion: number; +} + +export interface MsgAddNocX509IcaCertResponse { +} + +export interface MsgRevokeNocX509RootCert { + signer: string; + subject: string; + subjectKeyId: string; + serialNumber: string; + info: string; + time: number; + revokeChild: boolean; + schemaVersion: number; +} + +export interface MsgRevokeNocX509RootCertResponse { +} + +export interface MsgRevokeNocX509IcaCert { + signer: string; + subject: string; + subjectKeyId: string; + serialNumber: string; + info: string; + time: number; + revokeChild: boolean; + schemaVersion: number; +} + +export interface MsgRevokeNocX509IcaCertResponse { +} + +function createBaseMsgProposeAddX509RootCert(): MsgProposeAddX509RootCert { + return { signer: "", cert: "", info: "", time: 0, vid: 0, certSchemaVersion: 0, schemaVersion: 0 }; +} + +export const MsgProposeAddX509RootCert = { + encode(message: MsgProposeAddX509RootCert, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.cert !== "") { + writer.uint32(18).string(message.cert); + } + if (message.info !== "") { + writer.uint32(26).string(message.info); + } + if (message.time !== 0) { + writer.uint32(32).int64(message.time); + } + if (message.vid !== 0) { + writer.uint32(40).int32(message.vid); + } + if (message.certSchemaVersion !== 0) { + writer.uint32(48).uint32(message.certSchemaVersion); + } + if (message.schemaVersion !== 0) { + writer.uint32(56).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgProposeAddX509RootCert { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgProposeAddX509RootCert(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.cert = reader.string(); + break; + case 3: + message.info = reader.string(); + break; + case 4: + message.time = longToNumber(reader.int64() as Long); + break; + case 5: + message.vid = reader.int32(); + break; + case 6: + message.certSchemaVersion = reader.uint32(); + break; + case 7: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgProposeAddX509RootCert { + return { + signer: isSet(object.signer) ? String(object.signer) : "", + cert: isSet(object.cert) ? String(object.cert) : "", + info: isSet(object.info) ? String(object.info) : "", + time: isSet(object.time) ? Number(object.time) : 0, + vid: isSet(object.vid) ? Number(object.vid) : 0, + certSchemaVersion: isSet(object.certSchemaVersion) ? Number(object.certSchemaVersion) : 0, + schemaVersion: isSet(object.schemaVersion) ? Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: MsgProposeAddX509RootCert): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.cert !== undefined && (obj.cert = message.cert); + message.info !== undefined && (obj.info = message.info); + message.time !== undefined && (obj.time = Math.round(message.time)); + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.certSchemaVersion !== undefined && (obj.certSchemaVersion = Math.round(message.certSchemaVersion)); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): MsgProposeAddX509RootCert { + const message = createBaseMsgProposeAddX509RootCert(); + message.signer = object.signer ?? 
""; + message.cert = object.cert ?? ""; + message.info = object.info ?? ""; + message.time = object.time ?? 0; + message.vid = object.vid ?? 0; + message.certSchemaVersion = object.certSchemaVersion ?? 0; + message.schemaVersion = object.schemaVersion ?? 0; + return message; + }, +}; + +function createBaseMsgProposeAddX509RootCertResponse(): MsgProposeAddX509RootCertResponse { + return {}; +} + +export const MsgProposeAddX509RootCertResponse = { + encode(_: MsgProposeAddX509RootCertResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgProposeAddX509RootCertResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgProposeAddX509RootCertResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgProposeAddX509RootCertResponse { + return {}; + }, + + toJSON(_: MsgProposeAddX509RootCertResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>( + _: I, + ): MsgProposeAddX509RootCertResponse { + const message = createBaseMsgProposeAddX509RootCertResponse(); + return message; + }, +}; + +function createBaseMsgApproveAddX509RootCert(): MsgApproveAddX509RootCert { + return { signer: "", subject: "", subjectKeyId: "", info: "", time: 0 }; +} + +export const MsgApproveAddX509RootCert = { + encode(message: MsgApproveAddX509RootCert, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.subject !== "") { + writer.uint32(18).string(message.subject); + } + if (message.subjectKeyId !== "") { + writer.uint32(26).string(message.subjectKeyId); + } + if (message.info !== "") { + 
writer.uint32(34).string(message.info); + } + if (message.time !== 0) { + writer.uint32(40).int64(message.time); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgApproveAddX509RootCert { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgApproveAddX509RootCert(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.subject = reader.string(); + break; + case 3: + message.subjectKeyId = reader.string(); + break; + case 4: + message.info = reader.string(); + break; + case 5: + message.time = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgApproveAddX509RootCert { + return { + signer: isSet(object.signer) ? String(object.signer) : "", + subject: isSet(object.subject) ? String(object.subject) : "", + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + info: isSet(object.info) ? String(object.info) : "", + time: isSet(object.time) ? Number(object.time) : 0, + }; + }, + + toJSON(message: MsgApproveAddX509RootCert): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.subject !== undefined && (obj.subject = message.subject); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + message.info !== undefined && (obj.info = message.info); + message.time !== undefined && (obj.time = Math.round(message.time)); + return obj; + }, + + fromPartial, I>>(object: I): MsgApproveAddX509RootCert { + const message = createBaseMsgApproveAddX509RootCert(); + message.signer = object.signer ?? ""; + message.subject = object.subject ?? ""; + message.subjectKeyId = object.subjectKeyId ?? 
""; + message.info = object.info ?? ""; + message.time = object.time ?? 0; + return message; + }, +}; + +function createBaseMsgApproveAddX509RootCertResponse(): MsgApproveAddX509RootCertResponse { + return {}; +} + +export const MsgApproveAddX509RootCertResponse = { + encode(_: MsgApproveAddX509RootCertResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgApproveAddX509RootCertResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgApproveAddX509RootCertResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgApproveAddX509RootCertResponse { + return {}; + }, + + toJSON(_: MsgApproveAddX509RootCertResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>( + _: I, + ): MsgApproveAddX509RootCertResponse { + const message = createBaseMsgApproveAddX509RootCertResponse(); + return message; + }, +}; + +function createBaseMsgAddX509Cert(): MsgAddX509Cert { + return { signer: "", cert: "", info: "", time: 0, certSchemaVersion: 0, schemaVersion: 0 }; +} + +export const MsgAddX509Cert = { + encode(message: MsgAddX509Cert, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.cert !== "") { + writer.uint32(18).string(message.cert); + } + if (message.info !== "") { + writer.uint32(26).string(message.info); + } + if (message.time !== 0) { + writer.uint32(32).int64(message.time); + } + if (message.certSchemaVersion !== 0) { + writer.uint32(40).uint32(message.certSchemaVersion); + } + if (message.schemaVersion !== 0) { + writer.uint32(48).uint32(message.schemaVersion); + } + return writer; + }, 
+ + decode(input: _m0.Reader | Uint8Array, length?: number): MsgAddX509Cert { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgAddX509Cert(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.cert = reader.string(); + break; + case 3: + message.info = reader.string(); + break; + case 4: + message.time = longToNumber(reader.int64() as Long); + break; + case 5: + message.certSchemaVersion = reader.uint32(); + break; + case 6: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgAddX509Cert { + return { + signer: isSet(object.signer) ? String(object.signer) : "", + cert: isSet(object.cert) ? String(object.cert) : "", + info: isSet(object.info) ? String(object.info) : "", + time: isSet(object.time) ? Number(object.time) : 0, + certSchemaVersion: isSet(object.certSchemaVersion) ? Number(object.certSchemaVersion) : 0, + schemaVersion: isSet(object.schemaVersion) ? Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: MsgAddX509Cert): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.cert !== undefined && (obj.cert = message.cert); + message.info !== undefined && (obj.info = message.info); + message.time !== undefined && (obj.time = Math.round(message.time)); + message.certSchemaVersion !== undefined && (obj.certSchemaVersion = Math.round(message.certSchemaVersion)); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): MsgAddX509Cert { + const message = createBaseMsgAddX509Cert(); + message.signer = object.signer ?? ""; + message.cert = object.cert ?? 
""; + message.info = object.info ?? ""; + message.time = object.time ?? 0; + message.certSchemaVersion = object.certSchemaVersion ?? 0; + message.schemaVersion = object.schemaVersion ?? 0; + return message; + }, +}; + +function createBaseMsgAddX509CertResponse(): MsgAddX509CertResponse { + return {}; +} + +export const MsgAddX509CertResponse = { + encode(_: MsgAddX509CertResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgAddX509CertResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgAddX509CertResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgAddX509CertResponse { + return {}; + }, + + toJSON(_: MsgAddX509CertResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgAddX509CertResponse { + const message = createBaseMsgAddX509CertResponse(); + return message; + }, +}; + +function createBaseMsgProposeRevokeX509RootCert(): MsgProposeRevokeX509RootCert { + return { + signer: "", + subject: "", + subjectKeyId: "", + info: "", + time: 0, + serialNumber: "", + revokeChild: false, + schemaVersion: 0, + }; +} + +export const MsgProposeRevokeX509RootCert = { + encode(message: MsgProposeRevokeX509RootCert, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.subject !== "") { + writer.uint32(18).string(message.subject); + } + if (message.subjectKeyId !== "") { + writer.uint32(26).string(message.subjectKeyId); + } + if (message.info !== "") { + writer.uint32(34).string(message.info); + } + if (message.time !== 0) { + writer.uint32(40).int64(message.time); + } + if 
(message.serialNumber !== "") { + writer.uint32(50).string(message.serialNumber); + } + if (message.revokeChild === true) { + writer.uint32(56).bool(message.revokeChild); + } + if (message.schemaVersion !== 0) { + writer.uint32(64).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgProposeRevokeX509RootCert { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgProposeRevokeX509RootCert(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.subject = reader.string(); + break; + case 3: + message.subjectKeyId = reader.string(); + break; + case 4: + message.info = reader.string(); + break; + case 5: + message.time = longToNumber(reader.int64() as Long); + break; + case 6: + message.serialNumber = reader.string(); + break; + case 7: + message.revokeChild = reader.bool(); + break; + case 8: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgProposeRevokeX509RootCert { + return { + signer: isSet(object.signer) ? String(object.signer) : "", + subject: isSet(object.subject) ? String(object.subject) : "", + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + info: isSet(object.info) ? String(object.info) : "", + time: isSet(object.time) ? Number(object.time) : 0, + serialNumber: isSet(object.serialNumber) ? String(object.serialNumber) : "", + revokeChild: isSet(object.revokeChild) ? Boolean(object.revokeChild) : false, + schemaVersion: isSet(object.schemaVersion) ? 
Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: MsgProposeRevokeX509RootCert): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.subject !== undefined && (obj.subject = message.subject); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + message.info !== undefined && (obj.info = message.info); + message.time !== undefined && (obj.time = Math.round(message.time)); + message.serialNumber !== undefined && (obj.serialNumber = message.serialNumber); + message.revokeChild !== undefined && (obj.revokeChild = message.revokeChild); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): MsgProposeRevokeX509RootCert { + const message = createBaseMsgProposeRevokeX509RootCert(); + message.signer = object.signer ?? ""; + message.subject = object.subject ?? ""; + message.subjectKeyId = object.subjectKeyId ?? ""; + message.info = object.info ?? ""; + message.time = object.time ?? 0; + message.serialNumber = object.serialNumber ?? ""; + message.revokeChild = object.revokeChild ?? false; + message.schemaVersion = object.schemaVersion ?? 0; + return message; + }, +}; + +function createBaseMsgProposeRevokeX509RootCertResponse(): MsgProposeRevokeX509RootCertResponse { + return {}; +} + +export const MsgProposeRevokeX509RootCertResponse = { + encode(_: MsgProposeRevokeX509RootCertResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgProposeRevokeX509RootCertResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgProposeRevokeX509RootCertResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgProposeRevokeX509RootCertResponse { + return {}; + }, + + toJSON(_: MsgProposeRevokeX509RootCertResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>( + _: I, + ): MsgProposeRevokeX509RootCertResponse { + const message = createBaseMsgProposeRevokeX509RootCertResponse(); + return message; + }, +}; + +function createBaseMsgApproveRevokeX509RootCert(): MsgApproveRevokeX509RootCert { + return { signer: "", subject: "", subjectKeyId: "", info: "", time: 0, serialNumber: "" }; +} + +export const MsgApproveRevokeX509RootCert = { + encode(message: MsgApproveRevokeX509RootCert, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.subject !== "") { + writer.uint32(18).string(message.subject); + } + if (message.subjectKeyId !== "") { + writer.uint32(26).string(message.subjectKeyId); + } + if (message.info !== "") { + writer.uint32(42).string(message.info); + } + if (message.time !== 0) { + writer.uint32(48).int64(message.time); + } + if (message.serialNumber !== "") { + writer.uint32(58).string(message.serialNumber); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgApproveRevokeX509RootCert { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgApproveRevokeX509RootCert(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.subject = reader.string(); + break; + case 3: + message.subjectKeyId = reader.string(); + break; + case 5: + message.info = reader.string(); + break; + case 6: + message.time = longToNumber(reader.int64() as Long); + break; + case 7: + message.serialNumber = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgApproveRevokeX509RootCert { + return { + signer: isSet(object.signer) ? String(object.signer) : "", + subject: isSet(object.subject) ? String(object.subject) : "", + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + info: isSet(object.info) ? String(object.info) : "", + time: isSet(object.time) ? Number(object.time) : 0, + serialNumber: isSet(object.serialNumber) ? String(object.serialNumber) : "", + }; + }, + + toJSON(message: MsgApproveRevokeX509RootCert): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.subject !== undefined && (obj.subject = message.subject); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + message.info !== undefined && (obj.info = message.info); + message.time !== undefined && (obj.time = Math.round(message.time)); + message.serialNumber !== undefined && (obj.serialNumber = message.serialNumber); + return obj; + }, + + fromPartial, I>>(object: I): MsgApproveRevokeX509RootCert { + const message = createBaseMsgApproveRevokeX509RootCert(); + message.signer = object.signer ?? ""; + message.subject = object.subject ?? ""; + message.subjectKeyId = object.subjectKeyId ?? ""; + message.info = object.info ?? ""; + message.time = object.time ?? 0; + message.serialNumber = object.serialNumber ?? 
""; + return message; + }, +}; + +function createBaseMsgApproveRevokeX509RootCertResponse(): MsgApproveRevokeX509RootCertResponse { + return {}; +} + +export const MsgApproveRevokeX509RootCertResponse = { + encode(_: MsgApproveRevokeX509RootCertResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgApproveRevokeX509RootCertResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgApproveRevokeX509RootCertResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgApproveRevokeX509RootCertResponse { + return {}; + }, + + toJSON(_: MsgApproveRevokeX509RootCertResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>( + _: I, + ): MsgApproveRevokeX509RootCertResponse { + const message = createBaseMsgApproveRevokeX509RootCertResponse(); + return message; + }, +}; + +function createBaseMsgRevokeX509Cert(): MsgRevokeX509Cert { + return { + signer: "", + subject: "", + subjectKeyId: "", + info: "", + time: 0, + serialNumber: "", + revokeChild: false, + schemaVersion: 0, + }; +} + +export const MsgRevokeX509Cert = { + encode(message: MsgRevokeX509Cert, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.subject !== "") { + writer.uint32(18).string(message.subject); + } + if (message.subjectKeyId !== "") { + writer.uint32(26).string(message.subjectKeyId); + } + if (message.info !== "") { + writer.uint32(34).string(message.info); + } + if (message.time !== 0) { + writer.uint32(40).int64(message.time); + } + if (message.serialNumber !== "") { + writer.uint32(50).string(message.serialNumber); + } 
+ if (message.revokeChild === true) { + writer.uint32(56).bool(message.revokeChild); + } + if (message.schemaVersion !== 0) { + writer.uint32(64).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRevokeX509Cert { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgRevokeX509Cert(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.subject = reader.string(); + break; + case 3: + message.subjectKeyId = reader.string(); + break; + case 4: + message.info = reader.string(); + break; + case 5: + message.time = longToNumber(reader.int64() as Long); + break; + case 6: + message.serialNumber = reader.string(); + break; + case 7: + message.revokeChild = reader.bool(); + break; + case 8: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgRevokeX509Cert { + return { + signer: isSet(object.signer) ? String(object.signer) : "", + subject: isSet(object.subject) ? String(object.subject) : "", + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + info: isSet(object.info) ? String(object.info) : "", + time: isSet(object.time) ? Number(object.time) : 0, + serialNumber: isSet(object.serialNumber) ? String(object.serialNumber) : "", + revokeChild: isSet(object.revokeChild) ? Boolean(object.revokeChild) : false, + schemaVersion: isSet(object.schemaVersion) ? 
Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: MsgRevokeX509Cert): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.subject !== undefined && (obj.subject = message.subject); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + message.info !== undefined && (obj.info = message.info); + message.time !== undefined && (obj.time = Math.round(message.time)); + message.serialNumber !== undefined && (obj.serialNumber = message.serialNumber); + message.revokeChild !== undefined && (obj.revokeChild = message.revokeChild); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): MsgRevokeX509Cert { + const message = createBaseMsgRevokeX509Cert(); + message.signer = object.signer ?? ""; + message.subject = object.subject ?? ""; + message.subjectKeyId = object.subjectKeyId ?? ""; + message.info = object.info ?? ""; + message.time = object.time ?? 0; + message.serialNumber = object.serialNumber ?? ""; + message.revokeChild = object.revokeChild ?? false; + message.schemaVersion = object.schemaVersion ?? 0; + return message; + }, +}; + +function createBaseMsgRevokeX509CertResponse(): MsgRevokeX509CertResponse { + return {}; +} + +export const MsgRevokeX509CertResponse = { + encode(_: MsgRevokeX509CertResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRevokeX509CertResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgRevokeX509CertResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgRevokeX509CertResponse { + return {}; + }, + + toJSON(_: MsgRevokeX509CertResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgRevokeX509CertResponse { + const message = createBaseMsgRevokeX509CertResponse(); + return message; + }, +}; + +function createBaseMsgRejectAddX509RootCert(): MsgRejectAddX509RootCert { + return { signer: "", subject: "", subjectKeyId: "", info: "", time: 0, schemaVersion: 0 }; +} + +export const MsgRejectAddX509RootCert = { + encode(message: MsgRejectAddX509RootCert, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.subject !== "") { + writer.uint32(18).string(message.subject); + } + if (message.subjectKeyId !== "") { + writer.uint32(26).string(message.subjectKeyId); + } + if (message.info !== "") { + writer.uint32(34).string(message.info); + } + if (message.time !== 0) { + writer.uint32(40).int64(message.time); + } + if (message.schemaVersion !== 0) { + writer.uint32(48).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRejectAddX509RootCert { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgRejectAddX509RootCert(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.subject = reader.string(); + break; + case 3: + message.subjectKeyId = reader.string(); + break; + case 4: + message.info = reader.string(); + break; + case 5: + message.time = longToNumber(reader.int64() as Long); + break; + case 6: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgRejectAddX509RootCert { + return { + signer: isSet(object.signer) ? String(object.signer) : "", + subject: isSet(object.subject) ? String(object.subject) : "", + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + info: isSet(object.info) ? String(object.info) : "", + time: isSet(object.time) ? Number(object.time) : 0, + schemaVersion: isSet(object.schemaVersion) ? Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: MsgRejectAddX509RootCert): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.subject !== undefined && (obj.subject = message.subject); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + message.info !== undefined && (obj.info = message.info); + message.time !== undefined && (obj.time = Math.round(message.time)); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): MsgRejectAddX509RootCert { + const message = createBaseMsgRejectAddX509RootCert(); + message.signer = object.signer ?? ""; + message.subject = object.subject ?? ""; + message.subjectKeyId = object.subjectKeyId ?? ""; + message.info = object.info ?? ""; + message.time = object.time ?? 0; + message.schemaVersion = object.schemaVersion ?? 
0; + return message; + }, +}; + +function createBaseMsgRejectAddX509RootCertResponse(): MsgRejectAddX509RootCertResponse { + return {}; +} + +export const MsgRejectAddX509RootCertResponse = { + encode(_: MsgRejectAddX509RootCertResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRejectAddX509RootCertResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgRejectAddX509RootCertResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgRejectAddX509RootCertResponse { + return {}; + }, + + toJSON(_: MsgRejectAddX509RootCertResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>( + _: I, + ): MsgRejectAddX509RootCertResponse { + const message = createBaseMsgRejectAddX509RootCertResponse(); + return message; + }, +}; + +function createBaseMsgAddPkiRevocationDistributionPoint(): MsgAddPkiRevocationDistributionPoint { + return { + signer: "", + vid: 0, + pid: 0, + isPAA: false, + label: "", + crlSignerCertificate: "", + issuerSubjectKeyID: "", + dataURL: "", + dataFileSize: 0, + dataDigest: "", + dataDigestType: 0, + revocationType: 0, + schemaVersion: 0, + crlSignerDelegator: "", + }; +} + +export const MsgAddPkiRevocationDistributionPoint = { + encode(message: MsgAddPkiRevocationDistributionPoint, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.vid !== 0) { + writer.uint32(16).int32(message.vid); + } + if (message.pid !== 0) { + writer.uint32(24).int32(message.pid); + } + if (message.isPAA === true) { + writer.uint32(32).bool(message.isPAA); + } + if (message.label !== "") { + 
writer.uint32(42).string(message.label); + } + if (message.crlSignerCertificate !== "") { + writer.uint32(50).string(message.crlSignerCertificate); + } + if (message.issuerSubjectKeyID !== "") { + writer.uint32(58).string(message.issuerSubjectKeyID); + } + if (message.dataURL !== "") { + writer.uint32(66).string(message.dataURL); + } + if (message.dataFileSize !== 0) { + writer.uint32(72).uint64(message.dataFileSize); + } + if (message.dataDigest !== "") { + writer.uint32(82).string(message.dataDigest); + } + if (message.dataDigestType !== 0) { + writer.uint32(88).uint32(message.dataDigestType); + } + if (message.revocationType !== 0) { + writer.uint32(96).uint32(message.revocationType); + } + if (message.schemaVersion !== 0) { + writer.uint32(104).uint32(message.schemaVersion); + } + if (message.crlSignerDelegator !== "") { + writer.uint32(114).string(message.crlSignerDelegator); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgAddPkiRevocationDistributionPoint { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgAddPkiRevocationDistributionPoint(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.vid = reader.int32(); + break; + case 3: + message.pid = reader.int32(); + break; + case 4: + message.isPAA = reader.bool(); + break; + case 5: + message.label = reader.string(); + break; + case 6: + message.crlSignerCertificate = reader.string(); + break; + case 7: + message.issuerSubjectKeyID = reader.string(); + break; + case 8: + message.dataURL = reader.string(); + break; + case 9: + message.dataFileSize = longToNumber(reader.uint64() as Long); + break; + case 10: + message.dataDigest = reader.string(); + break; + case 11: + message.dataDigestType = reader.uint32(); + break; + case 12: + message.revocationType = reader.uint32(); + break; + case 13: + message.schemaVersion = reader.uint32(); + break; + case 14: + message.crlSignerDelegator = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgAddPkiRevocationDistributionPoint { + return { + signer: isSet(object.signer) ? String(object.signer) : "", + vid: isSet(object.vid) ? Number(object.vid) : 0, + pid: isSet(object.pid) ? Number(object.pid) : 0, + isPAA: isSet(object.isPAA) ? Boolean(object.isPAA) : false, + label: isSet(object.label) ? String(object.label) : "", + crlSignerCertificate: isSet(object.crlSignerCertificate) ? String(object.crlSignerCertificate) : "", + issuerSubjectKeyID: isSet(object.issuerSubjectKeyID) ? String(object.issuerSubjectKeyID) : "", + dataURL: isSet(object.dataURL) ? String(object.dataURL) : "", + dataFileSize: isSet(object.dataFileSize) ? Number(object.dataFileSize) : 0, + dataDigest: isSet(object.dataDigest) ? String(object.dataDigest) : "", + dataDigestType: isSet(object.dataDigestType) ? 
Number(object.dataDigestType) : 0, + revocationType: isSet(object.revocationType) ? Number(object.revocationType) : 0, + schemaVersion: isSet(object.schemaVersion) ? Number(object.schemaVersion) : 0, + crlSignerDelegator: isSet(object.crlSignerDelegator) ? String(object.crlSignerDelegator) : "", + }; + }, + + toJSON(message: MsgAddPkiRevocationDistributionPoint): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.pid !== undefined && (obj.pid = Math.round(message.pid)); + message.isPAA !== undefined && (obj.isPAA = message.isPAA); + message.label !== undefined && (obj.label = message.label); + message.crlSignerCertificate !== undefined && (obj.crlSignerCertificate = message.crlSignerCertificate); + message.issuerSubjectKeyID !== undefined && (obj.issuerSubjectKeyID = message.issuerSubjectKeyID); + message.dataURL !== undefined && (obj.dataURL = message.dataURL); + message.dataFileSize !== undefined && (obj.dataFileSize = Math.round(message.dataFileSize)); + message.dataDigest !== undefined && (obj.dataDigest = message.dataDigest); + message.dataDigestType !== undefined && (obj.dataDigestType = Math.round(message.dataDigestType)); + message.revocationType !== undefined && (obj.revocationType = Math.round(message.revocationType)); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + message.crlSignerDelegator !== undefined && (obj.crlSignerDelegator = message.crlSignerDelegator); + return obj; + }, + + fromPartial, I>>( + object: I, + ): MsgAddPkiRevocationDistributionPoint { + const message = createBaseMsgAddPkiRevocationDistributionPoint(); + message.signer = object.signer ?? ""; + message.vid = object.vid ?? 0; + message.pid = object.pid ?? 0; + message.isPAA = object.isPAA ?? false; + message.label = object.label ?? ""; + message.crlSignerCertificate = object.crlSignerCertificate ?? 
""; + message.issuerSubjectKeyID = object.issuerSubjectKeyID ?? ""; + message.dataURL = object.dataURL ?? ""; + message.dataFileSize = object.dataFileSize ?? 0; + message.dataDigest = object.dataDigest ?? ""; + message.dataDigestType = object.dataDigestType ?? 0; + message.revocationType = object.revocationType ?? 0; + message.schemaVersion = object.schemaVersion ?? 0; + message.crlSignerDelegator = object.crlSignerDelegator ?? ""; + return message; + }, +}; + +function createBaseMsgAddPkiRevocationDistributionPointResponse(): MsgAddPkiRevocationDistributionPointResponse { + return {}; +} + +export const MsgAddPkiRevocationDistributionPointResponse = { + encode(_: MsgAddPkiRevocationDistributionPointResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgAddPkiRevocationDistributionPointResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgAddPkiRevocationDistributionPointResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgAddPkiRevocationDistributionPointResponse { + return {}; + }, + + toJSON(_: MsgAddPkiRevocationDistributionPointResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>( + _: I, + ): MsgAddPkiRevocationDistributionPointResponse { + const message = createBaseMsgAddPkiRevocationDistributionPointResponse(); + return message; + }, +}; + +function createBaseMsgUpdatePkiRevocationDistributionPoint(): MsgUpdatePkiRevocationDistributionPoint { + return { + signer: "", + vid: 0, + label: "", + crlSignerCertificate: "", + issuerSubjectKeyID: "", + dataURL: "", + dataFileSize: 0, + dataDigest: "", + dataDigestType: 0, + schemaVersion: 0, + crlSignerDelegator: "", + }; +} + +export const MsgUpdatePkiRevocationDistributionPoint = { + encode(message: MsgUpdatePkiRevocationDistributionPoint, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.vid !== 0) { + writer.uint32(16).int32(message.vid); + } + if (message.label !== "") { + writer.uint32(26).string(message.label); + } + if (message.crlSignerCertificate !== "") { + writer.uint32(34).string(message.crlSignerCertificate); + } + if (message.issuerSubjectKeyID !== "") { + writer.uint32(42).string(message.issuerSubjectKeyID); + } + if (message.dataURL !== "") { + writer.uint32(50).string(message.dataURL); + } + if (message.dataFileSize !== 0) { + writer.uint32(56).uint64(message.dataFileSize); + } + if (message.dataDigest !== "") { + writer.uint32(66).string(message.dataDigest); + } + if (message.dataDigestType !== 0) { + writer.uint32(72).uint32(message.dataDigestType); + } + if (message.schemaVersion !== 0) { + 
writer.uint32(80).uint32(message.schemaVersion); + } + if (message.crlSignerDelegator !== "") { + writer.uint32(90).string(message.crlSignerDelegator); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdatePkiRevocationDistributionPoint { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdatePkiRevocationDistributionPoint(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.vid = reader.int32(); + break; + case 3: + message.label = reader.string(); + break; + case 4: + message.crlSignerCertificate = reader.string(); + break; + case 5: + message.issuerSubjectKeyID = reader.string(); + break; + case 6: + message.dataURL = reader.string(); + break; + case 7: + message.dataFileSize = longToNumber(reader.uint64() as Long); + break; + case 8: + message.dataDigest = reader.string(); + break; + case 9: + message.dataDigestType = reader.uint32(); + break; + case 10: + message.schemaVersion = reader.uint32(); + break; + case 11: + message.crlSignerDelegator = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgUpdatePkiRevocationDistributionPoint { + return { + signer: isSet(object.signer) ? String(object.signer) : "", + vid: isSet(object.vid) ? Number(object.vid) : 0, + label: isSet(object.label) ? String(object.label) : "", + crlSignerCertificate: isSet(object.crlSignerCertificate) ? String(object.crlSignerCertificate) : "", + issuerSubjectKeyID: isSet(object.issuerSubjectKeyID) ? String(object.issuerSubjectKeyID) : "", + dataURL: isSet(object.dataURL) ? String(object.dataURL) : "", + dataFileSize: isSet(object.dataFileSize) ? Number(object.dataFileSize) : 0, + dataDigest: isSet(object.dataDigest) ? 
String(object.dataDigest) : "", + dataDigestType: isSet(object.dataDigestType) ? Number(object.dataDigestType) : 0, + schemaVersion: isSet(object.schemaVersion) ? Number(object.schemaVersion) : 0, + crlSignerDelegator: isSet(object.crlSignerDelegator) ? String(object.crlSignerDelegator) : "", + }; + }, + + toJSON(message: MsgUpdatePkiRevocationDistributionPoint): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.label !== undefined && (obj.label = message.label); + message.crlSignerCertificate !== undefined && (obj.crlSignerCertificate = message.crlSignerCertificate); + message.issuerSubjectKeyID !== undefined && (obj.issuerSubjectKeyID = message.issuerSubjectKeyID); + message.dataURL !== undefined && (obj.dataURL = message.dataURL); + message.dataFileSize !== undefined && (obj.dataFileSize = Math.round(message.dataFileSize)); + message.dataDigest !== undefined && (obj.dataDigest = message.dataDigest); + message.dataDigestType !== undefined && (obj.dataDigestType = Math.round(message.dataDigestType)); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + message.crlSignerDelegator !== undefined && (obj.crlSignerDelegator = message.crlSignerDelegator); + return obj; + }, + + fromPartial, I>>( + object: I, + ): MsgUpdatePkiRevocationDistributionPoint { + const message = createBaseMsgUpdatePkiRevocationDistributionPoint(); + message.signer = object.signer ?? ""; + message.vid = object.vid ?? 0; + message.label = object.label ?? ""; + message.crlSignerCertificate = object.crlSignerCertificate ?? ""; + message.issuerSubjectKeyID = object.issuerSubjectKeyID ?? ""; + message.dataURL = object.dataURL ?? ""; + message.dataFileSize = object.dataFileSize ?? 0; + message.dataDigest = object.dataDigest ?? ""; + message.dataDigestType = object.dataDigestType ?? 
0; + message.schemaVersion = object.schemaVersion ?? 0; + message.crlSignerDelegator = object.crlSignerDelegator ?? ""; + return message; + }, +}; + +function createBaseMsgUpdatePkiRevocationDistributionPointResponse(): MsgUpdatePkiRevocationDistributionPointResponse { + return {}; +} + +export const MsgUpdatePkiRevocationDistributionPointResponse = { + encode(_: MsgUpdatePkiRevocationDistributionPointResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdatePkiRevocationDistributionPointResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgUpdatePkiRevocationDistributionPointResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgUpdatePkiRevocationDistributionPointResponse { + return {}; + }, + + toJSON(_: MsgUpdatePkiRevocationDistributionPointResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>( + _: I, + ): MsgUpdatePkiRevocationDistributionPointResponse { + const message = createBaseMsgUpdatePkiRevocationDistributionPointResponse(); + return message; + }, +}; + +function createBaseMsgDeletePkiRevocationDistributionPoint(): MsgDeletePkiRevocationDistributionPoint { + return { signer: "", vid: 0, label: "", issuerSubjectKeyID: "" }; +} + +export const MsgDeletePkiRevocationDistributionPoint = { + encode(message: MsgDeletePkiRevocationDistributionPoint, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.vid !== 0) { + writer.uint32(16).int32(message.vid); + } + if (message.label !== "") { + writer.uint32(26).string(message.label); + } + if 
(message.issuerSubjectKeyID !== "") { + writer.uint32(34).string(message.issuerSubjectKeyID); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDeletePkiRevocationDistributionPoint { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgDeletePkiRevocationDistributionPoint(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.vid = reader.int32(); + break; + case 3: + message.label = reader.string(); + break; + case 4: + message.issuerSubjectKeyID = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgDeletePkiRevocationDistributionPoint { + return { + signer: isSet(object.signer) ? String(object.signer) : "", + vid: isSet(object.vid) ? Number(object.vid) : 0, + label: isSet(object.label) ? String(object.label) : "", + issuerSubjectKeyID: isSet(object.issuerSubjectKeyID) ? String(object.issuerSubjectKeyID) : "", + }; + }, + + toJSON(message: MsgDeletePkiRevocationDistributionPoint): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + message.label !== undefined && (obj.label = message.label); + message.issuerSubjectKeyID !== undefined && (obj.issuerSubjectKeyID = message.issuerSubjectKeyID); + return obj; + }, + + fromPartial, I>>( + object: I, + ): MsgDeletePkiRevocationDistributionPoint { + const message = createBaseMsgDeletePkiRevocationDistributionPoint(); + message.signer = object.signer ?? ""; + message.vid = object.vid ?? 0; + message.label = object.label ?? ""; + message.issuerSubjectKeyID = object.issuerSubjectKeyID ?? 
""; + return message; + }, +}; + +function createBaseMsgDeletePkiRevocationDistributionPointResponse(): MsgDeletePkiRevocationDistributionPointResponse { + return {}; +} + +export const MsgDeletePkiRevocationDistributionPointResponse = { + encode(_: MsgDeletePkiRevocationDistributionPointResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDeletePkiRevocationDistributionPointResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgDeletePkiRevocationDistributionPointResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgDeletePkiRevocationDistributionPointResponse { + return {}; + }, + + toJSON(_: MsgDeletePkiRevocationDistributionPointResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>( + _: I, + ): MsgDeletePkiRevocationDistributionPointResponse { + const message = createBaseMsgDeletePkiRevocationDistributionPointResponse(); + return message; + }, +}; + +function createBaseMsgAssignVid(): MsgAssignVid { + return { signer: "", subject: "", subjectKeyId: "", vid: 0 }; +} + +export const MsgAssignVid = { + encode(message: MsgAssignVid, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.subject !== "") { + writer.uint32(18).string(message.subject); + } + if (message.subjectKeyId !== "") { + writer.uint32(26).string(message.subjectKeyId); + } + if (message.vid !== 0) { + writer.uint32(32).int32(message.vid); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgAssignVid { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgAssignVid(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.subject = reader.string(); + break; + case 3: + message.subjectKeyId = reader.string(); + break; + case 4: + message.vid = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgAssignVid { + return { + signer: isSet(object.signer) ? String(object.signer) : "", + subject: isSet(object.subject) ? String(object.subject) : "", + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + vid: isSet(object.vid) ? Number(object.vid) : 0, + }; + }, + + toJSON(message: MsgAssignVid): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.subject !== undefined && (obj.subject = message.subject); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + message.vid !== undefined && (obj.vid = Math.round(message.vid)); + return obj; + }, + + fromPartial, I>>(object: I): MsgAssignVid { + const message = createBaseMsgAssignVid(); + message.signer = object.signer ?? ""; + message.subject = object.subject ?? ""; + message.subjectKeyId = object.subjectKeyId ?? ""; + message.vid = object.vid ?? 0; + return message; + }, +}; + +function createBaseMsgAssignVidResponse(): MsgAssignVidResponse { + return {}; +} + +export const MsgAssignVidResponse = { + encode(_: MsgAssignVidResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgAssignVidResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgAssignVidResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgAssignVidResponse { + return {}; + }, + + toJSON(_: MsgAssignVidResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgAssignVidResponse { + const message = createBaseMsgAssignVidResponse(); + return message; + }, +}; + +function createBaseMsgAddNocX509RootCert(): MsgAddNocX509RootCert { + return { signer: "", cert: "", certSchemaVersion: 0, schemaVersion: 0 }; +} + +export const MsgAddNocX509RootCert = { + encode(message: MsgAddNocX509RootCert, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.cert !== "") { + writer.uint32(18).string(message.cert); + } + if (message.certSchemaVersion !== 0) { + writer.uint32(32).uint32(message.certSchemaVersion); + } + if (message.schemaVersion !== 0) { + writer.uint32(40).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgAddNocX509RootCert { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgAddNocX509RootCert(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.cert = reader.string(); + break; + case 4: + message.certSchemaVersion = reader.uint32(); + break; + case 5: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgAddNocX509RootCert { + return { + signer: isSet(object.signer) ? 
String(object.signer) : "", + cert: isSet(object.cert) ? String(object.cert) : "", + certSchemaVersion: isSet(object.certSchemaVersion) ? Number(object.certSchemaVersion) : 0, + schemaVersion: isSet(object.schemaVersion) ? Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: MsgAddNocX509RootCert): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.cert !== undefined && (obj.cert = message.cert); + message.certSchemaVersion !== undefined && (obj.certSchemaVersion = Math.round(message.certSchemaVersion)); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): MsgAddNocX509RootCert { + const message = createBaseMsgAddNocX509RootCert(); + message.signer = object.signer ?? ""; + message.cert = object.cert ?? ""; + message.certSchemaVersion = object.certSchemaVersion ?? 0; + message.schemaVersion = object.schemaVersion ?? 0; + return message; + }, +}; + +function createBaseMsgAddNocX509RootCertResponse(): MsgAddNocX509RootCertResponse { + return {}; +} + +export const MsgAddNocX509RootCertResponse = { + encode(_: MsgAddNocX509RootCertResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgAddNocX509RootCertResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgAddNocX509RootCertResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgAddNocX509RootCertResponse { + return {}; + }, + + toJSON(_: MsgAddNocX509RootCertResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgAddNocX509RootCertResponse { + const message = createBaseMsgAddNocX509RootCertResponse(); + return message; + }, +}; + +function createBaseMsgRemoveX509Cert(): MsgRemoveX509Cert { + return { signer: "", subject: "", subjectKeyId: "", serialNumber: "" }; +} + +export const MsgRemoveX509Cert = { + encode(message: MsgRemoveX509Cert, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.subject !== "") { + writer.uint32(18).string(message.subject); + } + if (message.subjectKeyId !== "") { + writer.uint32(26).string(message.subjectKeyId); + } + if (message.serialNumber !== "") { + writer.uint32(34).string(message.serialNumber); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRemoveX509Cert { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgRemoveX509Cert(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.subject = reader.string(); + break; + case 3: + message.subjectKeyId = reader.string(); + break; + case 4: + message.serialNumber = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgRemoveX509Cert { + return { + signer: isSet(object.signer) ? 
String(object.signer) : "", + subject: isSet(object.subject) ? String(object.subject) : "", + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + serialNumber: isSet(object.serialNumber) ? String(object.serialNumber) : "", + }; + }, + + toJSON(message: MsgRemoveX509Cert): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.subject !== undefined && (obj.subject = message.subject); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + message.serialNumber !== undefined && (obj.serialNumber = message.serialNumber); + return obj; + }, + + fromPartial, I>>(object: I): MsgRemoveX509Cert { + const message = createBaseMsgRemoveX509Cert(); + message.signer = object.signer ?? ""; + message.subject = object.subject ?? ""; + message.subjectKeyId = object.subjectKeyId ?? ""; + message.serialNumber = object.serialNumber ?? ""; + return message; + }, +}; + +function createBaseMsgRemoveX509CertResponse(): MsgRemoveX509CertResponse { + return {}; +} + +export const MsgRemoveX509CertResponse = { + encode(_: MsgRemoveX509CertResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRemoveX509CertResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgRemoveX509CertResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgRemoveX509CertResponse { + return {}; + }, + + toJSON(_: MsgRemoveX509CertResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgRemoveX509CertResponse { + const message = createBaseMsgRemoveX509CertResponse(); + return message; + }, +}; + +function createBaseMsgAddNocX509IcaCert(): MsgAddNocX509IcaCert { + return { signer: "", cert: "", certSchemaVersion: 0, schemaVersion: 0 }; +} + +export const MsgAddNocX509IcaCert = { + encode(message: MsgAddNocX509IcaCert, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.cert !== "") { + writer.uint32(18).string(message.cert); + } + if (message.certSchemaVersion !== 0) { + writer.uint32(24).uint32(message.certSchemaVersion); + } + if (message.schemaVersion !== 0) { + writer.uint32(32).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgAddNocX509IcaCert { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgAddNocX509IcaCert(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.cert = reader.string(); + break; + case 3: + message.certSchemaVersion = reader.uint32(); + break; + case 4: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgAddNocX509IcaCert { + return { + signer: isSet(object.signer) ? 
String(object.signer) : "", + cert: isSet(object.cert) ? String(object.cert) : "", + certSchemaVersion: isSet(object.certSchemaVersion) ? Number(object.certSchemaVersion) : 0, + schemaVersion: isSet(object.schemaVersion) ? Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: MsgAddNocX509IcaCert): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.cert !== undefined && (obj.cert = message.cert); + message.certSchemaVersion !== undefined && (obj.certSchemaVersion = Math.round(message.certSchemaVersion)); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): MsgAddNocX509IcaCert { + const message = createBaseMsgAddNocX509IcaCert(); + message.signer = object.signer ?? ""; + message.cert = object.cert ?? ""; + message.certSchemaVersion = object.certSchemaVersion ?? 0; + message.schemaVersion = object.schemaVersion ?? 0; + return message; + }, +}; + +function createBaseMsgAddNocX509IcaCertResponse(): MsgAddNocX509IcaCertResponse { + return {}; +} + +export const MsgAddNocX509IcaCertResponse = { + encode(_: MsgAddNocX509IcaCertResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgAddNocX509IcaCertResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgAddNocX509IcaCertResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgAddNocX509IcaCertResponse { + return {}; + }, + + toJSON(_: MsgAddNocX509IcaCertResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgAddNocX509IcaCertResponse { + const message = createBaseMsgAddNocX509IcaCertResponse(); + return message; + }, +}; + +function createBaseMsgRevokeNocX509RootCert(): MsgRevokeNocX509RootCert { + return { + signer: "", + subject: "", + subjectKeyId: "", + serialNumber: "", + info: "", + time: 0, + revokeChild: false, + schemaVersion: 0, + }; +} + +export const MsgRevokeNocX509RootCert = { + encode(message: MsgRevokeNocX509RootCert, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.subject !== "") { + writer.uint32(18).string(message.subject); + } + if (message.subjectKeyId !== "") { + writer.uint32(26).string(message.subjectKeyId); + } + if (message.serialNumber !== "") { + writer.uint32(34).string(message.serialNumber); + } + if (message.info !== "") { + writer.uint32(42).string(message.info); + } + if (message.time !== 0) { + writer.uint32(48).int64(message.time); + } + if (message.revokeChild === true) { + writer.uint32(56).bool(message.revokeChild); + } + if (message.schemaVersion !== 0) { + writer.uint32(64).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRevokeNocX509RootCert { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgRevokeNocX509RootCert(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.subject = reader.string(); + break; + case 3: + message.subjectKeyId = reader.string(); + break; + case 4: + message.serialNumber = reader.string(); + break; + case 5: + message.info = reader.string(); + break; + case 6: + message.time = longToNumber(reader.int64() as Long); + break; + case 7: + message.revokeChild = reader.bool(); + break; + case 8: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgRevokeNocX509RootCert { + return { + signer: isSet(object.signer) ? String(object.signer) : "", + subject: isSet(object.subject) ? String(object.subject) : "", + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + serialNumber: isSet(object.serialNumber) ? String(object.serialNumber) : "", + info: isSet(object.info) ? String(object.info) : "", + time: isSet(object.time) ? Number(object.time) : 0, + revokeChild: isSet(object.revokeChild) ? Boolean(object.revokeChild) : false, + schemaVersion: isSet(object.schemaVersion) ? 
Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: MsgRevokeNocX509RootCert): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.subject !== undefined && (obj.subject = message.subject); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + message.serialNumber !== undefined && (obj.serialNumber = message.serialNumber); + message.info !== undefined && (obj.info = message.info); + message.time !== undefined && (obj.time = Math.round(message.time)); + message.revokeChild !== undefined && (obj.revokeChild = message.revokeChild); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): MsgRevokeNocX509RootCert { + const message = createBaseMsgRevokeNocX509RootCert(); + message.signer = object.signer ?? ""; + message.subject = object.subject ?? ""; + message.subjectKeyId = object.subjectKeyId ?? ""; + message.serialNumber = object.serialNumber ?? ""; + message.info = object.info ?? ""; + message.time = object.time ?? 0; + message.revokeChild = object.revokeChild ?? false; + message.schemaVersion = object.schemaVersion ?? 0; + return message; + }, +}; + +function createBaseMsgRevokeNocX509RootCertResponse(): MsgRevokeNocX509RootCertResponse { + return {}; +} + +export const MsgRevokeNocX509RootCertResponse = { + encode(_: MsgRevokeNocX509RootCertResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRevokeNocX509RootCertResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgRevokeNocX509RootCertResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgRevokeNocX509RootCertResponse { + return {}; + }, + + toJSON(_: MsgRevokeNocX509RootCertResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>( + _: I, + ): MsgRevokeNocX509RootCertResponse { + const message = createBaseMsgRevokeNocX509RootCertResponse(); + return message; + }, +}; + +function createBaseMsgRevokeNocX509IcaCert(): MsgRevokeNocX509IcaCert { + return { + signer: "", + subject: "", + subjectKeyId: "", + serialNumber: "", + info: "", + time: 0, + revokeChild: false, + schemaVersion: 0, + }; +} + +export const MsgRevokeNocX509IcaCert = { + encode(message: MsgRevokeNocX509IcaCert, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.subject !== "") { + writer.uint32(18).string(message.subject); + } + if (message.subjectKeyId !== "") { + writer.uint32(26).string(message.subjectKeyId); + } + if (message.serialNumber !== "") { + writer.uint32(34).string(message.serialNumber); + } + if (message.info !== "") { + writer.uint32(42).string(message.info); + } + if (message.time !== 0) { + writer.uint32(48).int64(message.time); + } + if (message.revokeChild === true) { + writer.uint32(56).bool(message.revokeChild); + } + if (message.schemaVersion !== 0) { + writer.uint32(64).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRevokeNocX509IcaCert { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgRevokeNocX509IcaCert(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.subject = reader.string(); + break; + case 3: + message.subjectKeyId = reader.string(); + break; + case 4: + message.serialNumber = reader.string(); + break; + case 5: + message.info = reader.string(); + break; + case 6: + message.time = longToNumber(reader.int64() as Long); + break; + case 7: + message.revokeChild = reader.bool(); + break; + case 8: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgRevokeNocX509IcaCert { + return { + signer: isSet(object.signer) ? String(object.signer) : "", + subject: isSet(object.subject) ? String(object.subject) : "", + subjectKeyId: isSet(object.subjectKeyId) ? String(object.subjectKeyId) : "", + serialNumber: isSet(object.serialNumber) ? String(object.serialNumber) : "", + info: isSet(object.info) ? String(object.info) : "", + time: isSet(object.time) ? Number(object.time) : 0, + revokeChild: isSet(object.revokeChild) ? Boolean(object.revokeChild) : false, + schemaVersion: isSet(object.schemaVersion) ? 
Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: MsgRevokeNocX509IcaCert): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.subject !== undefined && (obj.subject = message.subject); + message.subjectKeyId !== undefined && (obj.subjectKeyId = message.subjectKeyId); + message.serialNumber !== undefined && (obj.serialNumber = message.serialNumber); + message.info !== undefined && (obj.info = message.info); + message.time !== undefined && (obj.time = Math.round(message.time)); + message.revokeChild !== undefined && (obj.revokeChild = message.revokeChild); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): MsgRevokeNocX509IcaCert { + const message = createBaseMsgRevokeNocX509IcaCert(); + message.signer = object.signer ?? ""; + message.subject = object.subject ?? ""; + message.subjectKeyId = object.subjectKeyId ?? ""; + message.serialNumber = object.serialNumber ?? ""; + message.info = object.info ?? ""; + message.time = object.time ?? 0; + message.revokeChild = object.revokeChild ?? false; + message.schemaVersion = object.schemaVersion ?? 0; + return message; + }, +}; + +function createBaseMsgRevokeNocX509IcaCertResponse(): MsgRevokeNocX509IcaCertResponse { + return {}; +} + +export const MsgRevokeNocX509IcaCertResponse = { + encode(_: MsgRevokeNocX509IcaCertResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRevokeNocX509IcaCertResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgRevokeNocX509IcaCertResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgRevokeNocX509IcaCertResponse { + return {}; + }, + + toJSON(_: MsgRevokeNocX509IcaCertResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgRevokeNocX509IcaCertResponse { + const message = createBaseMsgRevokeNocX509IcaCertResponse(); + return message; + }, +}; + +/** Msg defines the Msg service. */ +export interface Msg { + ProposeAddX509RootCert(request: MsgProposeAddX509RootCert): Promise; + ApproveAddX509RootCert(request: MsgApproveAddX509RootCert): Promise; + AddX509Cert(request: MsgAddX509Cert): Promise; + ProposeRevokeX509RootCert(request: MsgProposeRevokeX509RootCert): Promise; + ApproveRevokeX509RootCert(request: MsgApproveRevokeX509RootCert): Promise; + RevokeX509Cert(request: MsgRevokeX509Cert): Promise; + RejectAddX509RootCert(request: MsgRejectAddX509RootCert): Promise; + AddPkiRevocationDistributionPoint( + request: MsgAddPkiRevocationDistributionPoint, + ): Promise; + UpdatePkiRevocationDistributionPoint( + request: MsgUpdatePkiRevocationDistributionPoint, + ): Promise; + DeletePkiRevocationDistributionPoint( + request: MsgDeletePkiRevocationDistributionPoint, + ): Promise; + AssignVid(request: MsgAssignVid): Promise; + AddNocX509RootCert(request: MsgAddNocX509RootCert): Promise; + RemoveX509Cert(request: MsgRemoveX509Cert): Promise; + AddNocX509IcaCert(request: MsgAddNocX509IcaCert): Promise; + RevokeNocX509RootCert(request: MsgRevokeNocX509RootCert): Promise; + /** this line is used by starport scaffolding # proto/tx/rpc */ + RevokeNocX509IcaCert(request: MsgRevokeNocX509IcaCert): Promise; +} + +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + 
this.ProposeAddX509RootCert = this.ProposeAddX509RootCert.bind(this); + this.ApproveAddX509RootCert = this.ApproveAddX509RootCert.bind(this); + this.AddX509Cert = this.AddX509Cert.bind(this); + this.ProposeRevokeX509RootCert = this.ProposeRevokeX509RootCert.bind(this); + this.ApproveRevokeX509RootCert = this.ApproveRevokeX509RootCert.bind(this); + this.RevokeX509Cert = this.RevokeX509Cert.bind(this); + this.RejectAddX509RootCert = this.RejectAddX509RootCert.bind(this); + this.AddPkiRevocationDistributionPoint = this.AddPkiRevocationDistributionPoint.bind(this); + this.UpdatePkiRevocationDistributionPoint = this.UpdatePkiRevocationDistributionPoint.bind(this); + this.DeletePkiRevocationDistributionPoint = this.DeletePkiRevocationDistributionPoint.bind(this); + this.AssignVid = this.AssignVid.bind(this); + this.AddNocX509RootCert = this.AddNocX509RootCert.bind(this); + this.RemoveX509Cert = this.RemoveX509Cert.bind(this); + this.AddNocX509IcaCert = this.AddNocX509IcaCert.bind(this); + this.RevokeNocX509RootCert = this.RevokeNocX509RootCert.bind(this); + this.RevokeNocX509IcaCert = this.RevokeNocX509IcaCert.bind(this); + } + ProposeAddX509RootCert(request: MsgProposeAddX509RootCert): Promise { + const data = MsgProposeAddX509RootCert.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Msg", + "ProposeAddX509RootCert", + data, + ); + return promise.then((data) => MsgProposeAddX509RootCertResponse.decode(new _m0.Reader(data))); + } + + ApproveAddX509RootCert(request: MsgApproveAddX509RootCert): Promise { + const data = MsgApproveAddX509RootCert.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Msg", + "ApproveAddX509RootCert", + data, + ); + return promise.then((data) => MsgApproveAddX509RootCertResponse.decode(new _m0.Reader(data))); + } + + AddX509Cert(request: MsgAddX509Cert): Promise { + const data = MsgAddX509Cert.encode(request).finish(); + 
const promise = this.rpc.request("zigbeealliance.distributedcomplianceledger.pki.Msg", "AddX509Cert", data); + return promise.then((data) => MsgAddX509CertResponse.decode(new _m0.Reader(data))); + } + + ProposeRevokeX509RootCert(request: MsgProposeRevokeX509RootCert): Promise { + const data = MsgProposeRevokeX509RootCert.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Msg", + "ProposeRevokeX509RootCert", + data, + ); + return promise.then((data) => MsgProposeRevokeX509RootCertResponse.decode(new _m0.Reader(data))); + } + + ApproveRevokeX509RootCert(request: MsgApproveRevokeX509RootCert): Promise { + const data = MsgApproveRevokeX509RootCert.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Msg", + "ApproveRevokeX509RootCert", + data, + ); + return promise.then((data) => MsgApproveRevokeX509RootCertResponse.decode(new _m0.Reader(data))); + } + + RevokeX509Cert(request: MsgRevokeX509Cert): Promise { + const data = MsgRevokeX509Cert.encode(request).finish(); + const promise = this.rpc.request("zigbeealliance.distributedcomplianceledger.pki.Msg", "RevokeX509Cert", data); + return promise.then((data) => MsgRevokeX509CertResponse.decode(new _m0.Reader(data))); + } + + RejectAddX509RootCert(request: MsgRejectAddX509RootCert): Promise { + const data = MsgRejectAddX509RootCert.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Msg", + "RejectAddX509RootCert", + data, + ); + return promise.then((data) => MsgRejectAddX509RootCertResponse.decode(new _m0.Reader(data))); + } + + AddPkiRevocationDistributionPoint( + request: MsgAddPkiRevocationDistributionPoint, + ): Promise { + const data = MsgAddPkiRevocationDistributionPoint.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Msg", + "AddPkiRevocationDistributionPoint", + data, + ); + 
return promise.then((data) => MsgAddPkiRevocationDistributionPointResponse.decode(new _m0.Reader(data))); + } + + UpdatePkiRevocationDistributionPoint( + request: MsgUpdatePkiRevocationDistributionPoint, + ): Promise { + const data = MsgUpdatePkiRevocationDistributionPoint.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Msg", + "UpdatePkiRevocationDistributionPoint", + data, + ); + return promise.then((data) => MsgUpdatePkiRevocationDistributionPointResponse.decode(new _m0.Reader(data))); + } + + DeletePkiRevocationDistributionPoint( + request: MsgDeletePkiRevocationDistributionPoint, + ): Promise { + const data = MsgDeletePkiRevocationDistributionPoint.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Msg", + "DeletePkiRevocationDistributionPoint", + data, + ); + return promise.then((data) => MsgDeletePkiRevocationDistributionPointResponse.decode(new _m0.Reader(data))); + } + + AssignVid(request: MsgAssignVid): Promise { + const data = MsgAssignVid.encode(request).finish(); + const promise = this.rpc.request("zigbeealliance.distributedcomplianceledger.pki.Msg", "AssignVid", data); + return promise.then((data) => MsgAssignVidResponse.decode(new _m0.Reader(data))); + } + + AddNocX509RootCert(request: MsgAddNocX509RootCert): Promise { + const data = MsgAddNocX509RootCert.encode(request).finish(); + const promise = this.rpc.request("zigbeealliance.distributedcomplianceledger.pki.Msg", "AddNocX509RootCert", data); + return promise.then((data) => MsgAddNocX509RootCertResponse.decode(new _m0.Reader(data))); + } + + RemoveX509Cert(request: MsgRemoveX509Cert): Promise { + const data = MsgRemoveX509Cert.encode(request).finish(); + const promise = this.rpc.request("zigbeealliance.distributedcomplianceledger.pki.Msg", "RemoveX509Cert", data); + return promise.then((data) => MsgRemoveX509CertResponse.decode(new _m0.Reader(data))); + } + + 
AddNocX509IcaCert(request: MsgAddNocX509IcaCert): Promise { + const data = MsgAddNocX509IcaCert.encode(request).finish(); + const promise = this.rpc.request("zigbeealliance.distributedcomplianceledger.pki.Msg", "AddNocX509IcaCert", data); + return promise.then((data) => MsgAddNocX509IcaCertResponse.decode(new _m0.Reader(data))); + } + + RevokeNocX509RootCert(request: MsgRevokeNocX509RootCert): Promise { + const data = MsgRevokeNocX509RootCert.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Msg", + "RevokeNocX509RootCert", + data, + ); + return promise.then((data) => MsgRevokeNocX509RootCertResponse.decode(new _m0.Reader(data))); + } + + RevokeNocX509IcaCert(request: MsgRevokeNocX509IcaCert): Promise { + const data = MsgRevokeNocX509IcaCert.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.pki.Msg", + "RevokeNocX509IcaCert", + data, + ); + return promise.then((data) => MsgRevokeNocX509IcaCertResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/unique_certificate.ts b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/unique_certificate.ts new file mode 100644 index 000000000..6106c82cf --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.pki/types/zigbeealliance/distributedcomplianceledger/pki/unique_certificate.ts @@ -0,0 +1,92 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.pki"; + +export interface UniqueCertificate { + issuer: string; + serialNumber: string; + present: boolean; +} + +function createBaseUniqueCertificate(): UniqueCertificate { + return { issuer: "", serialNumber: "", present: false }; +} + +export const UniqueCertificate = { + encode(message: UniqueCertificate, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.issuer !== "") { + writer.uint32(10).string(message.issuer); + } + if (message.serialNumber !== "") { + writer.uint32(18).string(message.serialNumber); + } + if (message.present === true) { + writer.uint32(24).bool(message.present); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UniqueCertificate { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseUniqueCertificate(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.issuer = reader.string(); + break; + case 2: + message.serialNumber = reader.string(); + break; + case 3: + message.present = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UniqueCertificate { + return { + issuer: isSet(object.issuer) ? String(object.issuer) : "", + serialNumber: isSet(object.serialNumber) ? String(object.serialNumber) : "", + present: isSet(object.present) ? Boolean(object.present) : false, + }; + }, + + toJSON(message: UniqueCertificate): unknown { + const obj: any = {}; + message.issuer !== undefined && (obj.issuer = message.issuer); + message.serialNumber !== undefined && (obj.serialNumber = message.serialNumber); + message.present !== undefined && (obj.present = message.present); + return obj; + }, + + fromPartial, I>>(object: I): UniqueCertificate { + const message = createBaseUniqueCertificate(); + message.issuer = object.issuer ?? ""; + message.serialNumber = object.serialNumber ?? ""; + message.present = object.present ?? false; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.validator/api.swagger.yml b/ts-client/zigbeealliance.distributedcomplianceledger.validator/api.swagger.yml new file mode 100644 index 000000000..714500ad1 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.validator/api.swagger.yml @@ -0,0 +1,1322 @@ +swagger: '2.0' +info: + title: HTTP API Console zigbeealliance.distributedcomplianceledger.validator + name: '' + description: '' +paths: + /dcl/validator/disabled-nodes: + get: + operationId: DisabledValidatorAll + responses: + '200': + description: A successful response. + schema: + type: object + properties: + disabledValidator: + type: array + items: + type: object + properties: + address: + type: string + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + disabledByNodeAdmin: + type: boolean + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/validator/disabled-nodes/{address}: + get: + operationId: DisabledValidator + responses: + '200': + description: A successful response. + schema: + type: object + properties: + disabledValidator: + type: object + properties: + address: + type: string + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + disabledByNodeAdmin: + type: boolean + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: address + in: path + required: true + type: string + tags: + - Query + /dcl/validator/last-powers: + get: + operationId: LastValidatorPowerAll + responses: + '200': + description: A successful response. 
+ schema: + type: object + properties: + lastValidatorPower: + type: array + items: + type: object + properties: + owner: + type: string + power: + type: integer + format: int32 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/validator/last-powers/{owner}: + get: + operationId: LastValidatorPower + responses: + '200': + description: A successful response. + schema: + type: object + properties: + lastValidatorPower: + type: object + properties: + owner: + type: string + power: + type: integer + format: int32 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: owner + in: path + required: true + type: string + tags: + - Query + /dcl/validator/nodes: + get: + operationId: ValidatorAll + responses: + '200': + description: A successful response. 
+ schema: + type: object + properties: + validator: + type: array + items: + type: object + properties: + owner: + type: string + description: + type: object + properties: + moniker: + type: string + identity: + type: string + website: + type: string + details: + type: string + pubKey: + type: object + properties: + '@type': + type: string + additionalProperties: {} + power: + type: integer + format: int32 + jailed: + type: boolean + jailedReason: + type: string + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/validator/nodes/{owner}: + get: + operationId: Validator + responses: + '200': + description: A successful response. + schema: + type: object + properties: + validator: + type: object + properties: + owner: + type: string + description: + type: object + properties: + moniker: + type: string + identity: + type: string + website: + type: string + details: + type: string + pubKey: + type: object + properties: + '@type': + type: string + additionalProperties: {} + power: + type: integer + format: int32 + jailed: + type: boolean + jailedReason: + type: string + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: owner + in: path + required: true + type: string + tags: + - Query + /dcl/validator/proposed-disable-nodes: + get: + operationId: ProposedDisableValidatorAll + responses: + '200': + description: A successful response. + schema: + type: object + properties: + proposedDisableValidator: + type: array + items: + type: object + properties: + address: + type: string + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/validator/proposed-disable-nodes/{address}: + get: + operationId: ProposedDisableValidator + responses: + '200': + description: A successful response. 
+ schema: + type: object + properties: + proposedDisableValidator: + type: object + properties: + address: + type: string + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + default: + description: An unexpected error response. + schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: address + in: path + required: true + type: string + tags: + - Query + /dcl/validator/rejected-disable-nodes: + get: + operationId: RejectedDisableValidatorAll + responses: + '200': + description: A successful response. + schema: + type: object + properties: + rejectedValidator: + type: array + items: + type: object + properties: + address: + type: string + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/validator/rejected-disable-nodes/{owner}: + get: + operationId: RejectedDisableValidator + responses: + '200': + description: A successful response. + schema: + type: object + properties: + rejectedValidator: + type: object + properties: + address: + type: string + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: owner + in: path + required: true + type: string + tags: + - Query +definitions: + Any: + type: object + properties: + '@type': + type: string + additionalProperties: {} + Status: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + Description: + type: object + properties: + moniker: + type: string + identity: + type: string + website: + type: string + details: + type: string + Grant: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + PageRequest: + type: object + properties: + key: + type: string + format: byte + offset: + type: string + format: uint64 + limit: + type: string + format: uint64 + count_total: + type: boolean + reverse: + type: boolean + PageResponse: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllDisabledValidatorResponse: + type: object + properties: + disabledValidator: + type: array + items: + type: object + properties: + address: + type: string + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + disabledByNodeAdmin: + type: boolean + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllLastValidatorPowerResponse: + type: object + properties: + 
lastValidatorPower: + type: array + items: + type: object + properties: + owner: + type: string + power: + type: integer + format: int32 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllProposedDisableValidatorResponse: + type: object + properties: + proposedDisableValidator: + type: array + items: + type: object + properties: + address: + type: string + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllRejectedDisableValidatorResponse: + type: object + properties: + rejectedValidator: + type: array + items: + type: object + properties: + address: + type: string + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllValidatorResponse: + type: object + properties: + validator: + type: array + items: + type: object + properties: + owner: + type: string + description: + type: object + properties: + moniker: + type: string + identity: + type: string + website: + type: string + details: + type: string + pubKey: + type: object + properties: + '@type': + type: string + additionalProperties: {} + power: + type: integer + format: int32 + jailed: + type: boolean + jailedReason: + 
type: string + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryGetDisabledValidatorResponse: + type: object + properties: + disabledValidator: + type: object + properties: + address: + type: string + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + disabledByNodeAdmin: + type: boolean + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + QueryGetLastValidatorPowerResponse: + type: object + properties: + lastValidatorPower: + type: object + properties: + owner: + type: string + power: + type: integer + format: int32 + QueryGetProposedDisableValidatorResponse: + type: object + properties: + proposedDisableValidator: + type: object + properties: + address: + type: string + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + QueryGetRejectedDisableValidatorResponse: + type: object + properties: + rejectedValidator: + type: object + properties: + address: + type: string + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + QueryGetValidatorResponse: + type: object + properties: + validator: + type: object + properties: + owner: + type: string + description: + type: object + properties: + moniker: + type: string + 
identity: + type: string + website: + type: string + details: + type: string + pubKey: + type: object + properties: + '@type': + type: string + additionalProperties: {} + power: + type: integer + format: int32 + jailed: + type: boolean + jailedReason: + type: string + validator.DisabledValidator: + type: object + properties: + address: + type: string + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + disabledByNodeAdmin: + type: boolean + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + validator.LastValidatorPower: + type: object + properties: + owner: + type: string + power: + type: integer + format: int32 + validator.ProposedDisableValidator: + type: object + properties: + address: + type: string + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + validator.RejectedDisableValidator: + type: object + properties: + address: + type: string + creator: + type: string + approvals: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + rejects: + type: array + items: + type: object + properties: + address: + type: string + time: + type: string + format: int64 + info: + type: string + validator.Validator: + type: object + properties: + owner: + type: string + description: + type: object + properties: + moniker: + type: string + identity: + type: string + website: + type: string + details: + type: string + pubKey: + type: object + properties: + '@type': + type: string + 
additionalProperties: {} + power: + type: integer + format: int32 + jailed: + type: boolean + jailedReason: + type: string + MsgApproveDisableValidatorResponse: + type: object + MsgCreateValidatorResponse: + type: object + MsgDisableValidatorResponse: + type: object + MsgEnableValidatorResponse: + type: object + MsgProposeDisableValidatorResponse: + type: object + MsgRejectDisableValidatorResponse: + type: object diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.validator/index.ts b/ts-client/zigbeealliance.distributedcomplianceledger.validator/index.ts new file mode 100755 index 000000000..c9dfa159e --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.validator/index.ts @@ -0,0 +1,6 @@ +import Module from './module'; +import { txClient, queryClient, registry } from './module'; +import { msgTypes } from './registry'; + +export * from "./types"; +export { Module, msgTypes, txClient, queryClient, registry }; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.validator/module.ts b/ts-client/zigbeealliance.distributedcomplianceledger.validator/module.ts new file mode 100755 index 000000000..37803b1b8 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.validator/module.ts @@ -0,0 +1,308 @@ +// Generated by Ignite ignite.com/cli + +import { StdFee } from "@cosmjs/launchpad"; +import { SigningStargateClient, DeliverTxResponse } from "@cosmjs/stargate"; +import { EncodeObject, GeneratedType, OfflineSigner, Registry } from "@cosmjs/proto-signing"; +import { msgTypes } from './registry'; +import { IgniteClient } from "../client" +import { MissingWalletError } from "../helpers" +import { Api } from "./rest"; +import { MsgCreateValidator } from "./types/zigbeealliance/distributedcomplianceledger/validator/tx"; +import { MsgRejectDisableValidator } from "./types/zigbeealliance/distributedcomplianceledger/validator/tx"; +import { MsgEnableValidator } from 
"./types/zigbeealliance/distributedcomplianceledger/validator/tx"; +import { MsgApproveDisableValidator } from "./types/zigbeealliance/distributedcomplianceledger/validator/tx"; +import { MsgProposeDisableValidator } from "./types/zigbeealliance/distributedcomplianceledger/validator/tx"; +import { MsgDisableValidator } from "./types/zigbeealliance/distributedcomplianceledger/validator/tx"; + +import { Description as typeDescription} from "./types" +import { DisabledValidator as typeDisabledValidator} from "./types" +import { Grant as typeGrant} from "./types" +import { LastValidatorPower as typeLastValidatorPower} from "./types" +import { ProposedDisableValidator as typeProposedDisableValidator} from "./types" +import { RejectedDisableValidator as typeRejectedDisableValidator} from "./types" +import { Validator as typeValidator} from "./types" + +export { MsgCreateValidator, MsgRejectDisableValidator, MsgEnableValidator, MsgApproveDisableValidator, MsgProposeDisableValidator, MsgDisableValidator }; + +type sendMsgCreateValidatorParams = { + value: MsgCreateValidator, + fee?: StdFee, + memo?: string +}; + +type sendMsgRejectDisableValidatorParams = { + value: MsgRejectDisableValidator, + fee?: StdFee, + memo?: string +}; + +type sendMsgEnableValidatorParams = { + value: MsgEnableValidator, + fee?: StdFee, + memo?: string +}; + +type sendMsgApproveDisableValidatorParams = { + value: MsgApproveDisableValidator, + fee?: StdFee, + memo?: string +}; + +type sendMsgProposeDisableValidatorParams = { + value: MsgProposeDisableValidator, + fee?: StdFee, + memo?: string +}; + +type sendMsgDisableValidatorParams = { + value: MsgDisableValidator, + fee?: StdFee, + memo?: string +}; + + +type msgCreateValidatorParams = { + value: MsgCreateValidator, +}; + +type msgRejectDisableValidatorParams = { + value: MsgRejectDisableValidator, +}; + +type msgEnableValidatorParams = { + value: MsgEnableValidator, +}; + +type msgApproveDisableValidatorParams = { + value: 
MsgApproveDisableValidator, +}; + +type msgProposeDisableValidatorParams = { + value: MsgProposeDisableValidator, +}; + +type msgDisableValidatorParams = { + value: MsgDisableValidator, +}; + + +export const registry = new Registry(msgTypes); + +type Field = { + name: string; + type: unknown; +} +function getStructure(template) { + const structure: {fields: Field[]} = { fields: [] } + for (let [key, value] of Object.entries(template)) { + let field = { name: key, type: typeof value } + structure.fields.push(field) + } + return structure +} +const defaultFee = { + amount: [], + gas: "200000", +}; + +interface TxClientOptions { + addr: string + prefix: string + signer?: OfflineSigner +} + +export const txClient = ({ signer, prefix, addr }: TxClientOptions = { addr: "http://localhost:26657", prefix: "cosmos" }) => { + + return { + + async sendMsgCreateValidator({ value, fee, memo }: sendMsgCreateValidatorParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgCreateValidator: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgCreateValidator({ value: MsgCreateValidator.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgCreateValidator: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgRejectDisableValidator({ value, fee, memo }: sendMsgRejectDisableValidatorParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgRejectDisableValidator: Unable to sign Tx. 
Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgRejectDisableValidator({ value: MsgRejectDisableValidator.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgRejectDisableValidator: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgEnableValidator({ value, fee, memo }: sendMsgEnableValidatorParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgEnableValidator: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgEnableValidator({ value: MsgEnableValidator.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgEnableValidator: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgApproveDisableValidator({ value, fee, memo }: sendMsgApproveDisableValidatorParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgApproveDisableValidator: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgApproveDisableValidator({ value: MsgApproveDisableValidator.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? 
fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgApproveDisableValidator: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgProposeDisableValidator({ value, fee, memo }: sendMsgProposeDisableValidatorParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgProposeDisableValidator: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgProposeDisableValidator({ value: MsgProposeDisableValidator.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgProposeDisableValidator: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgDisableValidator({ value, fee, memo }: sendMsgDisableValidatorParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgDisableValidator: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgDisableValidator({ value: MsgDisableValidator.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? 
fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgDisableValidator: Could not broadcast Tx: '+ e.message) + } + }, + + + msgCreateValidator({ value }: msgCreateValidatorParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.validator.MsgCreateValidator", value: MsgCreateValidator.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgCreateValidator: Could not create message: ' + e.message) + } + }, + + msgRejectDisableValidator({ value }: msgRejectDisableValidatorParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.validator.MsgRejectDisableValidator", value: MsgRejectDisableValidator.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgRejectDisableValidator: Could not create message: ' + e.message) + } + }, + + msgEnableValidator({ value }: msgEnableValidatorParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.validator.MsgEnableValidator", value: MsgEnableValidator.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgEnableValidator: Could not create message: ' + e.message) + } + }, + + msgApproveDisableValidator({ value }: msgApproveDisableValidatorParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.validator.MsgApproveDisableValidator", value: MsgApproveDisableValidator.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgApproveDisableValidator: Could not create message: ' + e.message) + } + }, + + msgProposeDisableValidator({ value }: msgProposeDisableValidatorParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.validator.MsgProposeDisableValidator", value: MsgProposeDisableValidator.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgProposeDisableValidator: Could not create message: ' + e.message) + } + 
}, + + msgDisableValidator({ value }: msgDisableValidatorParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.validator.MsgDisableValidator", value: MsgDisableValidator.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgDisableValidator: Could not create message: ' + e.message) + } + }, + + } +}; + +interface QueryClientOptions { + addr: string +} + +export const queryClient = ({ addr: addr }: QueryClientOptions = { addr: "http://localhost:1317" }) => { + return new Api({ baseURL: addr }); +}; + +class SDKModule { + public query: ReturnType; + public tx: ReturnType; + public structure: Record; + public registry: Array<[string, GeneratedType]> = []; + + constructor(client: IgniteClient) { + + this.query = queryClient({ addr: client.env.apiURL }); + this.updateTX(client); + this.structure = { + Description: getStructure(typeDescription.fromPartial({})), + DisabledValidator: getStructure(typeDisabledValidator.fromPartial({})), + Grant: getStructure(typeGrant.fromPartial({})), + LastValidatorPower: getStructure(typeLastValidatorPower.fromPartial({})), + ProposedDisableValidator: getStructure(typeProposedDisableValidator.fromPartial({})), + RejectedDisableValidator: getStructure(typeRejectedDisableValidator.fromPartial({})), + Validator: getStructure(typeValidator.fromPartial({})), + + }; + client.on('signer-changed',(signer) => { + this.updateTX(client); + }) + } + updateTX(client: IgniteClient) { + const methods = txClient({ + signer: client.signer, + addr: client.env.rpcURL, + prefix: client.env.prefix ?? 
"cosmos", + }) + + this.tx = methods; + for (let m in methods) { + this.tx[m] = methods[m].bind(this.tx); + } + } +}; + +const Module = (test: IgniteClient) => { + return { + module: { + ZigbeeallianceDistributedcomplianceledgerValidator: new SDKModule(test) + }, + registry: msgTypes + } +} +export default Module; \ No newline at end of file diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.validator/registry.ts b/ts-client/zigbeealliance.distributedcomplianceledger.validator/registry.ts new file mode 100755 index 000000000..3075b03dc --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.validator/registry.ts @@ -0,0 +1,19 @@ +import { GeneratedType } from "@cosmjs/proto-signing"; +import { MsgCreateValidator } from "./types/zigbeealliance/distributedcomplianceledger/validator/tx"; +import { MsgRejectDisableValidator } from "./types/zigbeealliance/distributedcomplianceledger/validator/tx"; +import { MsgEnableValidator } from "./types/zigbeealliance/distributedcomplianceledger/validator/tx"; +import { MsgApproveDisableValidator } from "./types/zigbeealliance/distributedcomplianceledger/validator/tx"; +import { MsgProposeDisableValidator } from "./types/zigbeealliance/distributedcomplianceledger/validator/tx"; +import { MsgDisableValidator } from "./types/zigbeealliance/distributedcomplianceledger/validator/tx"; + +const msgTypes: Array<[string, GeneratedType]> = [ + ["/zigbeealliance.distributedcomplianceledger.validator.MsgCreateValidator", MsgCreateValidator], + ["/zigbeealliance.distributedcomplianceledger.validator.MsgRejectDisableValidator", MsgRejectDisableValidator], + ["/zigbeealliance.distributedcomplianceledger.validator.MsgEnableValidator", MsgEnableValidator], + ["/zigbeealliance.distributedcomplianceledger.validator.MsgApproveDisableValidator", MsgApproveDisableValidator], + ["/zigbeealliance.distributedcomplianceledger.validator.MsgProposeDisableValidator", MsgProposeDisableValidator], + 
["/zigbeealliance.distributedcomplianceledger.validator.MsgDisableValidator", MsgDisableValidator], + +]; + +export { msgTypes } \ No newline at end of file diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.validator/rest.ts b/ts-client/zigbeealliance.distributedcomplianceledger.validator/rest.ts new file mode 100644 index 000000000..097dfe2fb --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.validator/rest.ts @@ -0,0 +1,781 @@ +/* eslint-disable */ +/* tslint:disable */ +/* + * --------------------------------------------------------------- + * ## THIS FILE WAS GENERATED VIA SWAGGER-TYPESCRIPT-API ## + * ## ## + * ## AUTHOR: acacode ## + * ## SOURCE: https://github.com/acacode/swagger-typescript-api ## + * --------------------------------------------------------------- + */ + +export interface DistributedcomplianceledgervalidatorDisabledValidator { + address?: string; + creator?: string; + approvals?: ValidatorGrant[]; + disabledByNodeAdmin?: boolean; + rejects?: ValidatorGrant[]; +} + +export interface DistributedcomplianceledgervalidatorLastValidatorPower { + owner?: string; + + /** @format int32 */ + power?: number; +} + +export interface DistributedcomplianceledgervalidatorProposedDisableValidator { + address?: string; + creator?: string; + approvals?: ValidatorGrant[]; + rejects?: ValidatorGrant[]; +} + +export interface DistributedcomplianceledgervalidatorRejectedDisableValidator { + address?: string; + creator?: string; + approvals?: ValidatorGrant[]; + rejects?: ValidatorGrant[]; +} + +export interface DistributedcomplianceledgervalidatorValidator { + /** the account address of validator owner */ + owner?: string; + + /** description of the validator */ + description?: ValidatorDescription; + + /** + * the consensus public key of the tendermint validator + * `Any` contains an arbitrary serialized protocol buffer message along with a + * URL that describes the type of the serialized message. 
+ * + * Protobuf library provides support to pack/unpack Any values in the form + * of utility functions or additional generated methods of the Any type. + * Example 1: Pack and unpack a message in C++. + * Foo foo = ...; + * Any any; + * any.PackFrom(foo); + * ... + * if (any.UnpackTo(&foo)) { + * ... + * } + * Example 2: Pack and unpack a message in Java. + * Any any = Any.pack(foo); + * if (any.is(Foo.class)) { + * foo = any.unpack(Foo.class); + * Example 3: Pack and unpack a message in Python. + * foo = Foo(...) + * any = Any() + * any.Pack(foo) + * if any.Is(Foo.DESCRIPTOR): + * any.Unpack(foo) + * Example 4: Pack and unpack a message in Go + * foo := &pb.Foo{...} + * any, err := anypb.New(foo) + * if err != nil { + * ... + * } + * ... + * foo := &pb.Foo{} + * if err := any.UnmarshalTo(foo); err != nil { + * The pack methods provided by protobuf library will by default use + * 'type.googleapis.com/full.type.name' as the type URL and the unpack + * methods only use the fully qualified type name after the last '/' + * in the type URL, for example "foo.bar.com/x/y.z" will yield type + * name "y.z". + * JSON + * ==== + * The JSON representation of an `Any` value uses the regular + * representation of the deserialized, embedded message, with an + * additional field `@type` which contains the type URL. Example: + * package google.profile; + * message Person { + * string first_name = 1; + * string last_name = 2; + * { + * "@type": "type.googleapis.com/google.profile.Person", + * "firstName": , + * "lastName": + * If the embedded message type is well-known and has a custom JSON + * representation, that representation will be embedded adding a field + * `value` which holds the custom JSON in addition to the `@type` + * field. 
Example (for message [google.protobuf.Duration][]): + * "@type": "type.googleapis.com/google.protobuf.Duration", + * "value": "1.212s" + */ + pubKey?: ProtobufAny; + + /** + * validator consensus power + * @format int32 + */ + power?: number; + + /** has the validator been removed from validator set */ + jailed?: boolean; + + /** the reason of validator jailing */ + jailedReason?: string; +} + +/** +* `Any` contains an arbitrary serialized protocol buffer message along with a +URL that describes the type of the serialized message. + +Protobuf library provides support to pack/unpack Any values in the form +of utility functions or additional generated methods of the Any type. + +Example 1: Pack and unpack a message in C++. + + Foo foo = ...; + Any any; + any.PackFrom(foo); + ... + if (any.UnpackTo(&foo)) { + ... + } + +Example 2: Pack and unpack a message in Java. + + Foo foo = ...; + Any any = Any.pack(foo); + ... + if (any.is(Foo.class)) { + foo = any.unpack(Foo.class); + } + + Example 3: Pack and unpack a message in Python. + + foo = Foo(...) + any = Any() + any.Pack(foo) + ... + if any.Is(Foo.DESCRIPTOR): + any.Unpack(foo) + ... + + Example 4: Pack and unpack a message in Go + + foo := &pb.Foo{...} + any, err := anypb.New(foo) + if err != nil { + ... + } + ... + foo := &pb.Foo{} + if err := any.UnmarshalTo(foo); err != nil { + ... + } + +The pack methods provided by protobuf library will by default use +'type.googleapis.com/full.type.name' as the type URL and the unpack +methods only use the fully qualified type name after the last '/' +in the type URL, for example "foo.bar.com/x/y.z" will yield type +name "y.z". + + +JSON +==== +The JSON representation of an `Any` value uses the regular +representation of the deserialized, embedded message, with an +additional field `@type` which contains the type URL. 
Example: + + package google.profile; + message Person { + string first_name = 1; + string last_name = 2; + } + + { + "@type": "type.googleapis.com/google.profile.Person", + "firstName": , + "lastName": + } + +If the embedded message type is well-known and has a custom JSON +representation, that representation will be embedded adding a field +`value` which holds the custom JSON in addition to the `@type` +field. Example (for message [google.protobuf.Duration][]): + + { + "@type": "type.googleapis.com/google.protobuf.Duration", + "value": "1.212s" + } +*/ +export interface ProtobufAny { + /** + * A URL/resource name that uniquely identifies the type of the serialized + * protocol buffer message. This string must contain at least + * one "/" character. The last segment of the URL's path must represent + * the fully qualified name of the type (as in + * `path/google.protobuf.Duration`). The name should be in a canonical form + * (e.g., leading "." is not accepted). + * + * In practice, teams usually precompile into the binary all types that they + * expect it to use in the context of Any. However, for URLs which use the + * scheme `http`, `https`, or no scheme, one can optionally set up a type + * server that maps type URLs to message definitions as follows: + * * If no scheme is provided, `https` is assumed. + * * An HTTP GET on the URL must yield a [google.protobuf.Type][] + * value in binary format, or produce an error. + * * Applications are allowed to cache lookup results based on the + * URL, or have them precompiled into a binary to avoid any + * lookup. Therefore, binary compatibility needs to be preserved + * on changes to types. (Use versioned type names to manage + * breaking changes.) + * Note: this functionality is not currently available in the official + * protobuf release, and it is not used for type URLs beginning with + * type.googleapis.com. 
+ * Schemes other than `http`, `https` (or the empty scheme) might be + * used with implementation specific semantics. + */ + "@type"?: string; +} + +export interface RpcStatus { + /** @format int32 */ + code?: number; + message?: string; + details?: ProtobufAny[]; +} + +/** +* message SomeRequest { + Foo some_parameter = 1; + PageRequest pagination = 2; + } +*/ +export interface V1Beta1PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. + * @format byte + */ + key?: string; + + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. + * @format uint64 + */ + offset?: string; + + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. + * @format uint64 + */ + limit?: string; + + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. + */ + count_total?: boolean; + + /** + * reverse is set to true if results are to be returned in the descending order. + * + * Since: cosmos-sdk 0.43 + */ + reverse?: boolean; +} + +/** +* PageResponse is to be embedded in gRPC response messages where the +corresponding request message has used PageRequest. + + message SomeResponse { + repeated Bar results = 1; + PageResponse page = 2; + } +*/ +export interface V1Beta1PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. 
+ * @format byte + */ + next_key?: string; + + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + * @format uint64 + */ + total?: string; +} + +export interface ValidatorDescription { + /** a human-readable name for the validator. */ + moniker?: string; + + /** optional identity signature. */ + identity?: string; + + /** optional website link. */ + website?: string; + + /** optional details. */ + details?: string; +} + +export interface ValidatorGrant { + address?: string; + + /** + * number of nanoseconds elapsed since January 1, 1970 UTC + * @format int64 + */ + time?: string; + info?: string; +} + +export type ValidatorMsgApproveDisableValidatorResponse = object; + +export type ValidatorMsgCreateValidatorResponse = object; + +export type ValidatorMsgDisableValidatorResponse = object; + +export type ValidatorMsgEnableValidatorResponse = object; + +export type ValidatorMsgProposeDisableValidatorResponse = object; + +export type ValidatorMsgRejectDisableValidatorResponse = object; + +export interface ValidatorQueryAllDisabledValidatorResponse { + disabledValidator?: DistributedcomplianceledgervalidatorDisabledValidator[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface ValidatorQueryAllLastValidatorPowerResponse { + lastValidatorPower?: DistributedcomplianceledgervalidatorLastValidatorPower[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. 
+ * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface ValidatorQueryAllProposedDisableValidatorResponse { + proposedDisableValidator?: DistributedcomplianceledgervalidatorProposedDisableValidator[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface ValidatorQueryAllRejectedDisableValidatorResponse { + rejectedValidator?: DistributedcomplianceledgervalidatorRejectedDisableValidator[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface ValidatorQueryAllValidatorResponse { + validator?: DistributedcomplianceledgervalidatorValidator[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. 
+ * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface ValidatorQueryGetDisabledValidatorResponse { + disabledValidator?: DistributedcomplianceledgervalidatorDisabledValidator; +} + +export interface ValidatorQueryGetLastValidatorPowerResponse { + lastValidatorPower?: DistributedcomplianceledgervalidatorLastValidatorPower; +} + +export interface ValidatorQueryGetProposedDisableValidatorResponse { + proposedDisableValidator?: DistributedcomplianceledgervalidatorProposedDisableValidator; +} + +export interface ValidatorQueryGetRejectedDisableValidatorResponse { + rejectedValidator?: DistributedcomplianceledgervalidatorRejectedDisableValidator; +} + +export interface ValidatorQueryGetValidatorResponse { + validator?: DistributedcomplianceledgervalidatorValidator; +} + +import axios, { AxiosInstance, AxiosRequestConfig, AxiosResponse, ResponseType } from "axios"; + +export type QueryParamsType = Record; + +export interface FullRequestParams extends Omit { + /** set parameter to `true` for call `securityWorker` for this request */ + secure?: boolean; + /** request path */ + path: string; + /** content type of request body */ + type?: ContentType; + /** query params */ + query?: QueryParamsType; + /** format of response (i.e. 
response.json() -> format: "json") */ + format?: ResponseType; + /** request body */ + body?: unknown; +} + +export type RequestParams = Omit; + +export interface ApiConfig extends Omit { + securityWorker?: ( + securityData: SecurityDataType | null, + ) => Promise | AxiosRequestConfig | void; + secure?: boolean; + format?: ResponseType; +} + +export enum ContentType { + Json = "application/json", + FormData = "multipart/form-data", + UrlEncoded = "application/x-www-form-urlencoded", +} + +export class HttpClient { + public instance: AxiosInstance; + private securityData: SecurityDataType | null = null; + private securityWorker?: ApiConfig["securityWorker"]; + private secure?: boolean; + private format?: ResponseType; + + constructor({ securityWorker, secure, format, ...axiosConfig }: ApiConfig = {}) { + this.instance = axios.create({ ...axiosConfig, baseURL: axiosConfig.baseURL || "" }); + this.secure = secure; + this.format = format; + this.securityWorker = securityWorker; + } + + public setSecurityData = (data: SecurityDataType | null) => { + this.securityData = data; + }; + + private mergeRequestParams(params1: AxiosRequestConfig, params2?: AxiosRequestConfig): AxiosRequestConfig { + return { + ...this.instance.defaults, + ...params1, + ...(params2 || {}), + headers: { + ...(this.instance.defaults.headers || {}), + ...(params1.headers || {}), + ...((params2 && params2.headers) || {}), + }, + }; + } + + private createFormData(input: Record): FormData { + return Object.keys(input || {}).reduce((formData, key) => { + const property = input[key]; + formData.append( + key, + property instanceof Blob + ? property + : typeof property === "object" && property !== null + ? JSON.stringify(property) + : `${property}`, + ); + return formData; + }, new FormData()); + } + + public request = async ({ + secure, + path, + type, + query, + format, + body, + ...params + }: FullRequestParams): Promise> => { + const secureParams = + ((typeof secure === "boolean" ? 
secure : this.secure) && + this.securityWorker && + (await this.securityWorker(this.securityData))) || + {}; + const requestParams = this.mergeRequestParams(params, secureParams); + const responseFormat = (format && this.format) || void 0; + + if (type === ContentType.FormData && body && body !== null && typeof body === "object") { + requestParams.headers.common = { Accept: "*/*" }; + requestParams.headers.post = {}; + requestParams.headers.put = {}; + + body = this.createFormData(body as Record); + } + + return this.instance.request({ + ...requestParams, + headers: { + ...(type && type !== ContentType.FormData ? { "Content-Type": type } : {}), + ...(requestParams.headers || {}), + }, + params: query, + responseType: responseFormat, + data: body, + url: path, + }); + }; +} + +/** + * @title zigbeealliance/distributedcomplianceledger/validator/description.proto + * @version version not set + */ +export class Api extends HttpClient { + /** + * No description + * + * @tags Query + * @name QueryDisabledValidatorAll + * @summary Queries a list of DisabledValidator items. + * @request GET:/dcl/validator/disabled-nodes + */ + queryDisabledValidatorAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/validator/disabled-nodes`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryDisabledValidator + * @summary Queries a DisabledValidator by index. 
+ * @request GET:/dcl/validator/disabled-nodes/{address} + */ + queryDisabledValidator = (address: string, params: RequestParams = {}) => + this.request({ + path: `/dcl/validator/disabled-nodes/${address}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryLastValidatorPowerAll + * @summary Queries a list of lastValidatorPower items. + * @request GET:/dcl/validator/last-powers + */ + queryLastValidatorPowerAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/validator/last-powers`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryLastValidatorPower + * @summary Queries a lastValidatorPower by index. + * @request GET:/dcl/validator/last-powers/{owner} + */ + queryLastValidatorPower = (owner: string, params: RequestParams = {}) => + this.request({ + path: `/dcl/validator/last-powers/${owner}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryValidatorAll + * @summary Queries a list of validator items. + * @request GET:/dcl/validator/nodes + */ + queryValidatorAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/validator/nodes`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryValidator + * @summary Queries a validator by index. 
+ * @request GET:/dcl/validator/nodes/{owner} + */ + queryValidator = (owner: string, params: RequestParams = {}) => + this.request({ + path: `/dcl/validator/nodes/${owner}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryProposedDisableValidatorAll + * @summary Queries a list of ProposedDisableValidator items. + * @request GET:/dcl/validator/proposed-disable-nodes + */ + queryProposedDisableValidatorAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/validator/proposed-disable-nodes`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryProposedDisableValidator + * @summary Queries a ProposedDisableValidator by index. + * @request GET:/dcl/validator/proposed-disable-nodes/{address} + */ + queryProposedDisableValidator = (address: string, params: RequestParams = {}) => + this.request({ + path: `/dcl/validator/proposed-disable-nodes/${address}`, + method: "GET", + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryRejectedDisableValidatorAll + * @summary Queries a list of RejectedNode items. + * @request GET:/dcl/validator/rejected-disable-nodes + */ + queryRejectedDisableValidatorAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/validator/rejected-disable-nodes`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryRejectedDisableValidator + * @summary Queries a RejectedNode by index. 
+ * @request GET:/dcl/validator/rejected-disable-nodes/{owner} + */ + queryRejectedDisableValidator = (owner: string, params: RequestParams = {}) => + this.request({ + path: `/dcl/validator/rejected-disable-nodes/${owner}`, + method: "GET", + format: "json", + ...params, + }); +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.validator/types.ts b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types.ts new file mode 100755 index 000000000..adb261839 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types.ts @@ -0,0 +1,19 @@ +import { Description } from "./types/zigbeealliance/distributedcomplianceledger/validator/description" +import { DisabledValidator } from "./types/zigbeealliance/distributedcomplianceledger/validator/disabled_validator" +import { Grant } from "./types/zigbeealliance/distributedcomplianceledger/validator/grant" +import { LastValidatorPower } from "./types/zigbeealliance/distributedcomplianceledger/validator/last_validator_power" +import { ProposedDisableValidator } from "./types/zigbeealliance/distributedcomplianceledger/validator/proposed_disable_validator" +import { RejectedDisableValidator } from "./types/zigbeealliance/distributedcomplianceledger/validator/rejected_validator" +import { Validator } from "./types/zigbeealliance/distributedcomplianceledger/validator/validator" + + +export { + Description, + DisabledValidator, + Grant, + LastValidatorPower, + ProposedDisableValidator, + RejectedDisableValidator, + Validator, + + } \ No newline at end of file diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/cosmos/base/query/v1beta1/pagination.ts b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/cosmos/base/query/v1beta1/pagination.ts new file mode 100644 index 000000000..a31ca249d --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/cosmos/base/query/v1beta1/pagination.ts @@ -0,0 +1,286 @@ 
+/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos.base.query.v1beta1"; + +/** + * PageRequest is to be embedded in gRPC request messages for efficient + * pagination. Ex: + * + * message SomeRequest { + * Foo some_parameter = 1; + * PageRequest pagination = 2; + * } + */ +export interface PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. + */ + key: Uint8Array; + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. + */ + offset: number; + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. + */ + limit: number; + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. + */ + countTotal: boolean; + /** + * reverse is set to true if results are to be returned in the descending order. + * + * Since: cosmos-sdk 0.43 + */ + reverse: boolean; +} + +/** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ +export interface PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. 
+ */ + nextKey: Uint8Array; + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + */ + total: number; +} + +function createBasePageRequest(): PageRequest { + return { key: new Uint8Array(), offset: 0, limit: 0, countTotal: false, reverse: false }; +} + +export const PageRequest = { + encode(message: PageRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + if (message.offset !== 0) { + writer.uint32(16).uint64(message.offset); + } + if (message.limit !== 0) { + writer.uint32(24).uint64(message.limit); + } + if (message.countTotal === true) { + writer.uint32(32).bool(message.countTotal); + } + if (message.reverse === true) { + writer.uint32(40).bool(message.reverse); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PageRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePageRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + case 2: + message.offset = longToNumber(reader.uint64() as Long); + break; + case 3: + message.limit = longToNumber(reader.uint64() as Long); + break; + case 4: + message.countTotal = reader.bool(); + break; + case 5: + message.reverse = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PageRequest { + return { + key: isSet(object.key) ? bytesFromBase64(object.key) : new Uint8Array(), + offset: isSet(object.offset) ? Number(object.offset) : 0, + limit: isSet(object.limit) ? Number(object.limit) : 0, + countTotal: isSet(object.countTotal) ? Boolean(object.countTotal) : false, + reverse: isSet(object.reverse) ? 
Boolean(object.reverse) : false, + }; + }, + + toJSON(message: PageRequest): unknown { + const obj: any = {}; + message.key !== undefined + && (obj.key = base64FromBytes(message.key !== undefined ? message.key : new Uint8Array())); + message.offset !== undefined && (obj.offset = Math.round(message.offset)); + message.limit !== undefined && (obj.limit = Math.round(message.limit)); + message.countTotal !== undefined && (obj.countTotal = message.countTotal); + message.reverse !== undefined && (obj.reverse = message.reverse); + return obj; + }, + + fromPartial, I>>(object: I): PageRequest { + const message = createBasePageRequest(); + message.key = object.key ?? new Uint8Array(); + message.offset = object.offset ?? 0; + message.limit = object.limit ?? 0; + message.countTotal = object.countTotal ?? false; + message.reverse = object.reverse ?? false; + return message; + }, +}; + +function createBasePageResponse(): PageResponse { + return { nextKey: new Uint8Array(), total: 0 }; +} + +export const PageResponse = { + encode(message: PageResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.nextKey.length !== 0) { + writer.uint32(10).bytes(message.nextKey); + } + if (message.total !== 0) { + writer.uint32(16).uint64(message.total); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PageResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePageResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.nextKey = reader.bytes(); + break; + case 2: + message.total = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PageResponse { + return { + nextKey: isSet(object.nextKey) ? 
bytesFromBase64(object.nextKey) : new Uint8Array(), + total: isSet(object.total) ? Number(object.total) : 0, + }; + }, + + toJSON(message: PageResponse): unknown { + const obj: any = {}; + message.nextKey !== undefined + && (obj.nextKey = base64FromBytes(message.nextKey !== undefined ? message.nextKey : new Uint8Array())); + message.total !== undefined && (obj.total = Math.round(message.total)); + return obj; + }, + + fromPartial, I>>(object: I): PageResponse { + const message = createBasePageResponse(); + message.nextKey = object.nextKey ?? new Uint8Array(); + message.total = object.total ?? 0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? 
{ [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/cosmos_proto/cosmos.ts b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/cosmos_proto/cosmos.ts new file mode 100644 index 000000000..79c8d3486 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/cosmos_proto/cosmos.ts @@ -0,0 +1,247 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos_proto"; + +export enum ScalarType { + SCALAR_TYPE_UNSPECIFIED = 0, + SCALAR_TYPE_STRING = 1, + SCALAR_TYPE_BYTES = 2, + UNRECOGNIZED = -1, +} + +export function scalarTypeFromJSON(object: any): ScalarType { + switch (object) { + case 0: + case "SCALAR_TYPE_UNSPECIFIED": + return ScalarType.SCALAR_TYPE_UNSPECIFIED; + case 1: + case "SCALAR_TYPE_STRING": + return ScalarType.SCALAR_TYPE_STRING; + case 2: + case "SCALAR_TYPE_BYTES": + return ScalarType.SCALAR_TYPE_BYTES; + case -1: + case "UNRECOGNIZED": + default: + return ScalarType.UNRECOGNIZED; + } +} + +export function scalarTypeToJSON(object: ScalarType): string { + switch (object) { + case ScalarType.SCALAR_TYPE_UNSPECIFIED: + return "SCALAR_TYPE_UNSPECIFIED"; + case ScalarType.SCALAR_TYPE_STRING: + return "SCALAR_TYPE_STRING"; + case ScalarType.SCALAR_TYPE_BYTES: + return "SCALAR_TYPE_BYTES"; + case ScalarType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + 
+/** + * InterfaceDescriptor describes an interface type to be used with + * accepts_interface and implements_interface and declared by declare_interface. + */ +export interface InterfaceDescriptor { + /** + * name is the name of the interface. It should be a short-name (without + * a period) such that the fully qualified name of the interface will be + * package.name, ex. for the package a.b and interface named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the interface and its + * purpose. + */ + description: string; +} + +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. + * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. + * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ +export interface ScalarDescriptor { + /** + * name is the name of the scalar. It should be a short-name (without + * a period) such that the fully qualified name of the scalar will be + * package.name, ex. for the package a.b and scalar named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the scalar and its + * encoding format. For instance a big integer or decimal scalar should + * specify precisely the expected encoding format. + */ + description: string; + /** + * field_type is the type of field with which this scalar can be used. + * Scalars can be used with one and only one type of field so that + * encoding standards and simple and clear. Currently only string and + * bytes fields are supported for scalars. 
+ */ + fieldType: ScalarType[]; +} + +function createBaseInterfaceDescriptor(): InterfaceDescriptor { + return { name: "", description: "" }; +} + +export const InterfaceDescriptor = { + encode(message: InterfaceDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseInterfaceDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): InterfaceDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + }; + }, + + toJSON(message: InterfaceDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + return obj; + }, + + fromPartial, I>>(object: I): InterfaceDescriptor { + const message = createBaseInterfaceDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? 
""; + return message; + }, +}; + +function createBaseScalarDescriptor(): ScalarDescriptor { + return { name: "", description: "", fieldType: [] }; +} + +export const ScalarDescriptor = { + encode(message: ScalarDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + writer.uint32(26).fork(); + for (const v of message.fieldType) { + writer.int32(v); + } + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ScalarDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseScalarDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.fieldType.push(reader.int32() as any); + } + } else { + message.fieldType.push(reader.int32() as any); + } + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ScalarDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + fieldType: Array.isArray(object?.fieldType) ? 
object.fieldType.map((e: any) => scalarTypeFromJSON(e)) : [], + }; + }, + + toJSON(message: ScalarDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + if (message.fieldType) { + obj.fieldType = message.fieldType.map((e) => scalarTypeToJSON(e)); + } else { + obj.fieldType = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ScalarDescriptor { + const message = createBaseScalarDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? ""; + message.fieldType = object.fieldType?.map((e) => e) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/gogoproto/gogo.ts b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/gogoproto/gogo.ts new file mode 100644 index 000000000..3f41a047a --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/gogoproto/gogo.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "gogoproto"; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/google/api/annotations.ts b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/google/api/annotations.ts new file mode 100644 index 000000000..aace4787f --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/google/api/annotations.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "google.api"; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/google/api/http.ts b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/google/api/http.ts new file mode 100644 index 000000000..17e770d15 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/google/api/http.ts @@ -0,0 +1,589 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.api"; + +/** + * Defines the HTTP configuration for an API service. It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface Http { + /** + * A list of HTTP configuration rules that apply to individual API methods. + * + * **NOTE:** All service configuration rules follow "last one wins" order. 
+ */ + rules: HttpRule[]; + /** + * When set to true, URL path parmeters will be fully URI-decoded except in + * cases of single segment matches in reserved expansion, where "%2F" will be + * left encoded. + * + * The default behavior is to not decode RFC 6570 reserved characters in multi + * segment matches. + */ + fullyDecodeReservedExpansion: boolean; +} + +/** + * `HttpRule` defines the mapping of an RPC method to one or more HTTP + * REST API methods. The mapping specifies how different portions of the RPC + * request message are mapped to URL path, URL query parameters, and + * HTTP request body. The mapping is typically specified as an + * `google.api.http` annotation on the RPC method, + * see "google/api/annotations.proto" for details. + * + * The mapping consists of a field specifying the path template and + * method kind. The path template can refer to fields in the request + * message, as in the example below which describes a REST GET + * operation on a resource collection of messages: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}/{sub.subfield}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * SubMessage sub = 2; // `sub.subfield` is url-mapped + * } + * message Message { + * string text = 1; // content of the resource + * } + * + * The same http annotation can alternatively be expressed inside the + * `GRPC API Configuration` YAML file. + * + * http: + * rules: + * - selector: .Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * This definition enables an automatic, bidrectional mapping of HTTP + * JSON to RPC. 
Example: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456/foo` | `GetMessage(message_id: "123456" sub: SubMessage(subfield: "foo"))` + * + * In general, not only fields but also field paths can be referenced + * from a path pattern. Fields mapped to the path pattern cannot be + * repeated and must have a primitive (non-message) type. + * + * Any fields in the request message which are not bound by the path + * pattern automatically become (optional) HTTP query + * parameters. Assume the following definition of the request message: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * int64 revision = 2; // becomes a parameter + * SubMessage sub = 3; // `sub.subfield` becomes a parameter + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456?revision=2&sub.subfield=foo` | `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: "foo"))` + * + * Note that fields which are mapped to HTTP parameters must have a + * primitive type or a repeated primitive type. Message types are not + * allowed. In the case of a repeated type, the parameter can be + * repeated in the URL, as in `...?param=A¶m=B`. + * + * For HTTP method kinds which allow a request body, the `body` field + * specifies the mapping. 
Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * The following HTTP JSON to RPC mapping is enabled: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice of + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. 
Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC + * mappings: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` + * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: "123456")` + * + * # Rules for HTTP mapping + * + * The rules for mapping HTTP path, query parameters, and body fields + * to the request message are as follows: + * + * 1. The `body` field specifies either `*` or a field path, or is + * omitted. If omitted, it indicates there is no HTTP request body. + * 2. Leaf fields (recursive expansion of nested messages in the + * request) can be classified into three types: + * (a) Matched in the URL template. + * (b) Covered by body (if body is `*`, everything except (a) fields; + * else everything under the body field) + * (c) All other fields. + * 3. URL query parameters found in the HTTP request are mapped to (c) fields. + * 4. Any body sent with an HTTP request can contain only (b) fields. + * + * The syntax of the path template is as follows: + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single path segment. The syntax `**` matches zero + * or more path segments, which must be the last part of the path except the + * `Verb`. The syntax `LITERAL` matches literal text in the path. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. 
A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path, all characters + * except `[-_.~0-9a-zA-Z]` are percent-encoded. Such variables show up in the + * Discovery Document as `{var}`. + * + * If a variable contains one or more path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path, all + * characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. Such variables + * show up in the Discovery Document as `{+var}`. + * + * NOTE: While the single segment variable matches the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 + * Simple String Expansion, the multi segment variable **does not** match + * RFC 6570 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. + * + * NOTE: the field paths in variables and in the `body` must not refer to + * repeated fields or map fields. + */ +export interface HttpRule { + /** + * Selects methods to which this rule applies. + * + * Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + */ + selector: string; + /** Used for listing and getting information about resources. */ + get: + | string + | undefined; + /** Used for updating a resource. */ + put: + | string + | undefined; + /** Used for creating a resource. */ + post: + | string + | undefined; + /** Used for deleting a resource. */ + delete: + | string + | undefined; + /** Used for updating a resource. 
*/ + patch: + | string + | undefined; + /** + * The custom pattern is used for specifying an HTTP method that is not + * included in the `pattern` field, such as HEAD, or "*" to leave the + * HTTP method unspecified for this rule. The wild-card rule is useful + * for services that provide content to Web (HTML) clients. + */ + custom: + | CustomHttpPattern + | undefined; + /** + * The name of the request field whose value is mapped to the HTTP body, or + * `*` for mapping all fields not captured by the path pattern to the HTTP + * body. NOTE: the referred field must not be a repeated field and must be + * present at the top-level of request message type. + */ + body: string; + /** + * Optional. The name of the response field whose value is mapped to the HTTP + * body of response. Other response fields are ignored. When + * not set, the response message will be used as HTTP body of response. + */ + responseBody: string; + /** + * Additional HTTP bindings for the selector. Nested bindings must + * not contain an `additional_bindings` field themselves (that is, + * the nesting may only be one level deep). + */ + additionalBindings: HttpRule[]; +} + +/** A custom pattern is used for defining custom HTTP verb. */ +export interface CustomHttpPattern { + /** The name of this custom HTTP verb. */ + kind: string; + /** The path matched by this custom verb. */ + path: string; +} + +function createBaseHttp(): Http { + return { rules: [], fullyDecodeReservedExpansion: false }; +} + +export const Http = { + encode(message: Http, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rules) { + HttpRule.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.fullyDecodeReservedExpansion === true) { + writer.uint32(16).bool(message.fullyDecodeReservedExpansion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Http { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rules.push(HttpRule.decode(reader, reader.uint32())); + break; + case 2: + message.fullyDecodeReservedExpansion = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Http { + return { + rules: Array.isArray(object?.rules) ? object.rules.map((e: any) => HttpRule.fromJSON(e)) : [], + fullyDecodeReservedExpansion: isSet(object.fullyDecodeReservedExpansion) + ? Boolean(object.fullyDecodeReservedExpansion) + : false, + }; + }, + + toJSON(message: Http): unknown { + const obj: any = {}; + if (message.rules) { + obj.rules = message.rules.map((e) => e ? HttpRule.toJSON(e) : undefined); + } else { + obj.rules = []; + } + message.fullyDecodeReservedExpansion !== undefined + && (obj.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion); + return obj; + }, + + fromPartial, I>>(object: I): Http { + const message = createBaseHttp(); + message.rules = object.rules?.map((e) => HttpRule.fromPartial(e)) || []; + message.fullyDecodeReservedExpansion = object.fullyDecodeReservedExpansion ?? 
false; + return message; + }, +}; + +function createBaseHttpRule(): HttpRule { + return { + selector: "", + get: undefined, + put: undefined, + post: undefined, + delete: undefined, + patch: undefined, + custom: undefined, + body: "", + responseBody: "", + additionalBindings: [], + }; +} + +export const HttpRule = { + encode(message: HttpRule, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.selector !== "") { + writer.uint32(10).string(message.selector); + } + if (message.get !== undefined) { + writer.uint32(18).string(message.get); + } + if (message.put !== undefined) { + writer.uint32(26).string(message.put); + } + if (message.post !== undefined) { + writer.uint32(34).string(message.post); + } + if (message.delete !== undefined) { + writer.uint32(42).string(message.delete); + } + if (message.patch !== undefined) { + writer.uint32(50).string(message.patch); + } + if (message.custom !== undefined) { + CustomHttpPattern.encode(message.custom, writer.uint32(66).fork()).ldelim(); + } + if (message.body !== "") { + writer.uint32(58).string(message.body); + } + if (message.responseBody !== "") { + writer.uint32(98).string(message.responseBody); + } + for (const v of message.additionalBindings) { + HttpRule.encode(v!, writer.uint32(90).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): HttpRule { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseHttpRule(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.selector = reader.string(); + break; + case 2: + message.get = reader.string(); + break; + case 3: + message.put = reader.string(); + break; + case 4: + message.post = reader.string(); + break; + case 5: + message.delete = reader.string(); + break; + case 6: + message.patch = reader.string(); + break; + case 8: + message.custom = CustomHttpPattern.decode(reader, reader.uint32()); + break; + case 7: + message.body = reader.string(); + break; + case 12: + message.responseBody = reader.string(); + break; + case 11: + message.additionalBindings.push(HttpRule.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): HttpRule { + return { + selector: isSet(object.selector) ? String(object.selector) : "", + get: isSet(object.get) ? String(object.get) : undefined, + put: isSet(object.put) ? String(object.put) : undefined, + post: isSet(object.post) ? String(object.post) : undefined, + delete: isSet(object.delete) ? String(object.delete) : undefined, + patch: isSet(object.patch) ? String(object.patch) : undefined, + custom: isSet(object.custom) ? CustomHttpPattern.fromJSON(object.custom) : undefined, + body: isSet(object.body) ? String(object.body) : "", + responseBody: isSet(object.responseBody) ? String(object.responseBody) : "", + additionalBindings: Array.isArray(object?.additionalBindings) + ? 
object.additionalBindings.map((e: any) => HttpRule.fromJSON(e)) + : [], + }; + }, + + toJSON(message: HttpRule): unknown { + const obj: any = {}; + message.selector !== undefined && (obj.selector = message.selector); + message.get !== undefined && (obj.get = message.get); + message.put !== undefined && (obj.put = message.put); + message.post !== undefined && (obj.post = message.post); + message.delete !== undefined && (obj.delete = message.delete); + message.patch !== undefined && (obj.patch = message.patch); + message.custom !== undefined + && (obj.custom = message.custom ? CustomHttpPattern.toJSON(message.custom) : undefined); + message.body !== undefined && (obj.body = message.body); + message.responseBody !== undefined && (obj.responseBody = message.responseBody); + if (message.additionalBindings) { + obj.additionalBindings = message.additionalBindings.map((e) => e ? HttpRule.toJSON(e) : undefined); + } else { + obj.additionalBindings = []; + } + return obj; + }, + + fromPartial, I>>(object: I): HttpRule { + const message = createBaseHttpRule(); + message.selector = object.selector ?? ""; + message.get = object.get ?? undefined; + message.put = object.put ?? undefined; + message.post = object.post ?? undefined; + message.delete = object.delete ?? undefined; + message.patch = object.patch ?? undefined; + message.custom = (object.custom !== undefined && object.custom !== null) + ? CustomHttpPattern.fromPartial(object.custom) + : undefined; + message.body = object.body ?? ""; + message.responseBody = object.responseBody ?? 
""; + message.additionalBindings = object.additionalBindings?.map((e) => HttpRule.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseCustomHttpPattern(): CustomHttpPattern { + return { kind: "", path: "" }; +} + +export const CustomHttpPattern = { + encode(message: CustomHttpPattern, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.kind !== "") { + writer.uint32(10).string(message.kind); + } + if (message.path !== "") { + writer.uint32(18).string(message.path); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CustomHttpPattern { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCustomHttpPattern(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.kind = reader.string(); + break; + case 2: + message.path = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): CustomHttpPattern { + return { kind: isSet(object.kind) ? String(object.kind) : "", path: isSet(object.path) ? String(object.path) : "" }; + }, + + toJSON(message: CustomHttpPattern): unknown { + const obj: any = {}; + message.kind !== undefined && (obj.kind = message.kind); + message.path !== undefined && (obj.path = message.path); + return obj; + }, + + fromPartial, I>>(object: I): CustomHttpPattern { + const message = createBaseCustomHttpPattern(); + message.kind = object.kind ?? ""; + message.path = object.path ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? 
keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/google/protobuf/any.ts b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/google/protobuf/any.ts new file mode 100644 index 000000000..c4ebf38be --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/google/protobuf/any.ts @@ -0,0 +1,240 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * `Any` contains an arbitrary serialized protocol buffer message along with a + * URL that describes the type of the serialized message. + * + * Protobuf library provides support to pack/unpack Any values in the form + * of utility functions or additional generated methods of the Any type. + * + * Example 1: Pack and unpack a message in C++. + * + * Foo foo = ...; + * Any any; + * any.PackFrom(foo); + * ... + * if (any.UnpackTo(&foo)) { + * ... + * } + * + * Example 2: Pack and unpack a message in Java. + * + * Foo foo = ...; + * Any any = Any.pack(foo); + * ... + * if (any.is(Foo.class)) { + * foo = any.unpack(Foo.class); + * } + * + * Example 3: Pack and unpack a message in Python. + * + * foo = Foo(...) + * any = Any() + * any.Pack(foo) + * ... + * if any.Is(Foo.DESCRIPTOR): + * any.Unpack(foo) + * ... + * + * Example 4: Pack and unpack a message in Go + * + * foo := &pb.Foo{...} + * any, err := anypb.New(foo) + * if err != nil { + * ... + * } + * ... + * foo := &pb.Foo{} + * if err := any.UnmarshalTo(foo); err != nil { + * ... 
+ * } + * + * The pack methods provided by protobuf library will by default use + * 'type.googleapis.com/full.type.name' as the type URL and the unpack + * methods only use the fully qualified type name after the last '/' + * in the type URL, for example "foo.bar.com/x/y.z" will yield type + * name "y.z". + * + * JSON + * ==== + * The JSON representation of an `Any` value uses the regular + * representation of the deserialized, embedded message, with an + * additional field `@type` which contains the type URL. Example: + * + * package google.profile; + * message Person { + * string first_name = 1; + * string last_name = 2; + * } + * + * { + * "@type": "type.googleapis.com/google.profile.Person", + * "firstName": , + * "lastName": + * } + * + * If the embedded message type is well-known and has a custom JSON + * representation, that representation will be embedded adding a field + * `value` which holds the custom JSON in addition to the `@type` + * field. Example (for message [google.protobuf.Duration][]): + * + * { + * "@type": "type.googleapis.com/google.protobuf.Duration", + * "value": "1.212s" + * } + */ +export interface Any { + /** + * A URL/resource name that uniquely identifies the type of the serialized + * protocol buffer message. This string must contain at least + * one "/" character. The last segment of the URL's path must represent + * the fully qualified name of the type (as in + * `path/google.protobuf.Duration`). The name should be in a canonical form + * (e.g., leading "." is not accepted). + * + * In practice, teams usually precompile into the binary all types that they + * expect it to use in the context of Any. However, for URLs which use the + * scheme `http`, `https`, or no scheme, one can optionally set up a type + * server that maps type URLs to message definitions as follows: + * + * * If no scheme is provided, `https` is assumed. 
+ * * An HTTP GET on the URL must yield a [google.protobuf.Type][] + * value in binary format, or produce an error. + * * Applications are allowed to cache lookup results based on the + * URL, or have them precompiled into a binary to avoid any + * lookup. Therefore, binary compatibility needs to be preserved + * on changes to types. (Use versioned type names to manage + * breaking changes.) + * + * Note: this functionality is not currently available in the official + * protobuf release, and it is not used for type URLs beginning with + * type.googleapis.com. + * + * Schemes other than `http`, `https` (or the empty scheme) might be + * used with implementation specific semantics. + */ + typeUrl: string; + /** Must be a valid serialized protocol buffer of the above specified type. */ + value: Uint8Array; +} + +function createBaseAny(): Any { + return { typeUrl: "", value: new Uint8Array() }; +} + +export const Any = { + encode(message: Any, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.typeUrl !== "") { + writer.uint32(10).string(message.typeUrl); + } + if (message.value.length !== 0) { + writer.uint32(18).bytes(message.value); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Any { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAny(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.typeUrl = reader.string(); + break; + case 2: + message.value = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Any { + return { + typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : "", + value: isSet(object.value) ? 
bytesFromBase64(object.value) : new Uint8Array(), + }; + }, + + toJSON(message: Any): unknown { + const obj: any = {}; + message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl); + message.value !== undefined + && (obj.value = base64FromBytes(message.value !== undefined ? message.value : new Uint8Array())); + return obj; + }, + + fromPartial, I>>(object: I): Any { + const message = createBaseAny(); + message.typeUrl = object.typeUrl ?? ""; + message.value = object.value ?? new Uint8Array(); + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/google/protobuf/descriptor.ts b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/google/protobuf/descriptor.ts new file mode 100644 index 000000000..8fb7bb24c --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/google/protobuf/descriptor.ts @@ -0,0 +1,3753 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSet { + file: FileDescriptorProto[]; +} + +/** Describes a complete .proto file. */ +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name: string; + /** e.g. "foo", "foo.bar", etc. */ + package: string; + /** Names of files imported by this file. */ + dependency: string[]; + /** Indexes of the public imported files in the dependency list above. */ + publicDependency: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weakDependency: number[]; + /** All top-level definitions in this file. */ + messageType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + service: ServiceDescriptorProto[]; + extension: FieldDescriptorProto[]; + options: + | FileOptions + | undefined; + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. + */ + sourceCodeInfo: + | SourceCodeInfo + | undefined; + /** + * The syntax of the proto file. 
+ * The supported values are "proto2" and "proto3". + */ + syntax: string; +} + +/** Describes a message type. */ +export interface DescriptorProto { + name: string; + field: FieldDescriptorProto[]; + extension: FieldDescriptorProto[]; + nestedType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + extensionRange: DescriptorProto_ExtensionRange[]; + oneofDecl: OneofDescriptorProto[]; + options: MessageOptions | undefined; + reservedRange: DescriptorProto_ReservedRange[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reservedName: string[]; +} + +export interface DescriptorProto_ExtensionRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; + options: ExtensionRangeOptions | undefined; +} + +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; +} + +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Describes a field within a message. */ +export interface FieldDescriptorProto { + name: string; + number: number; + label: FieldDescriptorProto_Label; + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type: FieldDescriptorProto_Type; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). 
+ */ + typeName: string; + /** + * For extensions, this is the name of the type being extended. It is + * resolved in the same manner as type_name. + */ + extendee: string; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + * TODO(kenton): Base-64 encode? + */ + defaultValue: string; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneofIndex: number; + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + jsonName: string; + options: + | FieldOptions + | undefined; + /** + * If true, this is a proto3 "optional". When a proto3 field is optional, it + * tracks presence regardless of field type. + * + * When proto3_optional is true, this field must be belong to a oneof to + * signal to old proto3 clients that presence is tracked for this field. This + * oneof is known as a "synthetic" oneof, and this field must be its sole + * member (each proto3 optional field gets its own synthetic oneof). Synthetic + * oneofs exist in the descriptor only, and do not generate any API. Synthetic + * oneofs must be ordered after all "real" oneofs. + * + * For message fields, proto3_optional doesn't create any semantic change, + * since non-repeated message fields always track presence. However it still + * indicates the semantic detail of whether the user wrote "optional" or not. + * This can be useful for round-tripping the .proto file. For consistency we + * give message fields a synthetic oneof also, even though it is not required + * to track presence. 
This is especially important because the parser can't + * tell if a field is a message or an enum, so it must always create a + * synthetic oneof. + * + * Proto2 optional fields do not set this flag, because they already indicate + * optional with `LABEL_OPTIONAL`. + */ + proto3Optional: boolean; +} + +export enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + TYPE_GROUP = 10, + /** TYPE_MESSAGE - Length-delimited aggregate. */ + TYPE_MESSAGE = 11, + /** TYPE_BYTES - New in version 2. */ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + /** TYPE_SINT64 - Uses ZigZag encoding. 
*/ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + return FieldDescriptorProto_Type.TYPE_SINT64; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Type.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case 
FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + case FieldDescriptorProto_Type.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Label.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string { + switch (object) { + case 
FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + case FieldDescriptorProto_Label.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** Describes a oneof. */ +export interface OneofDescriptorProto { + name: string; + options: OneofOptions | undefined; +} + +/** Describes an enum type. */ +export interface EnumDescriptorProto { + name: string; + value: EnumValueDescriptorProto[]; + options: + | EnumOptions + | undefined; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reservedRange: EnumDescriptorProto_EnumReservedRange[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reservedName: string[]; +} + +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start: number; + /** Inclusive. */ + end: number; +} + +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProto { + name: string; + number: number; + options: EnumValueOptions | undefined; +} + +/** Describes a service. */ +export interface ServiceDescriptorProto { + name: string; + method: MethodDescriptorProto[]; + options: ServiceOptions | undefined; +} + +/** Describes a method of a service. */ +export interface MethodDescriptorProto { + name: string; + /** + * Input and output type names. 
These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + inputType: string; + outputType: string; + options: + | MethodOptions + | undefined; + /** Identifies if client streams multiple client messages */ + clientStreaming: boolean; + /** Identifies if server streams multiple server messages */ + serverStreaming: boolean; +} + +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage: string; + /** + * Controls the name of the wrapper Java class generated for the .proto file. + * That class will always contain the .proto file's getDescriptor() method as + * well as any top-level extensions defined in the .proto file. + * If java_multiple_files is disabled, then all the other classes from the + * .proto file will be nested inside the single wrapper outer class. + */ + javaOuterClassname: string; + /** + * If enabled, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the wrapper class + * named by java_outer_classname. However, the wrapper class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + javaMultipleFiles: boolean; + /** + * This option does nothing. + * + * @deprecated + */ + javaGenerateEqualsAndHash: boolean; + /** + * If set true, then the Java2 code generator will generate code that + * throws an exception whenever an attempt is made to assign a non-UTF-8 + * byte sequence to a string field. + * Message reflection will do the same. + * However, an extension field still accepts non-UTF-8 byte sequences. 
+ * This option has no effect on when used with the lite runtime. + */ + javaStringCheckUtf8: boolean; + optimizeFor: FileOptions_OptimizeMode; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + goPackage: string; + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + ccGenericServices: boolean; + javaGenericServices: boolean; + pyGenericServices: boolean; + phpGenericServices: boolean; + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated: boolean; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + ccEnableArenas: boolean; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. + */ + objcClassPrefix: string; + /** Namespace for generated classes; defaults to the package. 
*/ + csharpNamespace: string; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swiftPrefix: string; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + phpClassPrefix: string; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. When this option is empty, the package name will be used for + * determining the namespace. + */ + phpNamespace: string; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + phpMetadataNamespace: string; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + rubyPackage: string; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Generated classes can be optimized for speed or code size. */ +export enum FileOptions_OptimizeMode { + /** SPEED - Generate complete code for parsing, serialization, */ + SPEED = 1, + /** CODE_SIZE - etc. */ + CODE_SIZE = 2, + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. 
*/ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1, +} + +export function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + case -1: + case "UNRECOGNIZED": + default: + return FileOptions_OptimizeMode.UNRECOGNIZED; + } +} + +export function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + case FileOptions_OptimizeMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + messageSetWireFormat: boolean; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". 
+ */ + noStandardDescriptorAccessor: boolean; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated: boolean; + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + mapEntry: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface FieldOptions { + /** + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is not yet implemented in the open source + * release -- sorry, we'll try to include it in a future version! + */ + ctype: FieldOptions_CType; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. 
+ */ + packed: boolean; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. + * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype: FieldOptions_JSType; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * Note that implementations may choose not to check required fields within + * a lazy sub-message. That is, calling IsInitialized() on the outer message + * may return true even if the inner message has missing required fields. 
+ * This is necessary because otherwise the inner message would have to be + * parsed in order to perform the check, defeating the purpose of lazy + * parsing. An implementation which chooses not to check required fields + * must be consistent about it. That is, for any particular sub-message, the + * implementation must either *always* check its required fields, or *never* + * check its required fields, regardless of whether or not the message has + * been parsed. + */ + lazy: boolean; + /** + * Is this field deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated: boolean; + /** For Google-internal migration only. Do not use. */ + weak: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export enum FieldOptions_CType { + /** STRING - Default mode. */ + STRING = 0, + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_CType.UNRECOGNIZED; + } +} + +export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + case FieldOptions_CType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. */ + JS_NORMAL = 0, + /** JS_STRING - Use JavaScript strings. 
*/ + JS_STRING = 1, + /** JS_NUMBER - Use JavaScript numbers. */ + JS_NUMBER = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_JSType.UNRECOGNIZED; + } +} + +export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + case FieldOptions_JSType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface OneofOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias: boolean; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumValueOptions { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. 
*/ + uninterpretedOption: UninterpretedOption[]; +} + +export interface ServiceOptions { + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated: boolean; + idempotencyLevel: MethodOptions_IdempotencyLevel; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
+ */ +export enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1, +} + +export function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + case -1: + case "UNRECOGNIZED": + default: + return MethodOptions_IdempotencyLevel.UNRECOGNIZED; + } +} + +export function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + case MethodOptions_IdempotencyLevel.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. 
+ */ + identifierValue: string; + positiveIntValue: number; + negativeIntValue: number; + doubleValue: number; + stringValue: Uint8Array; + aggregateValue: string; +} + +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePart { + namePart: string; + isExtension: boolean; +} + +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. 
This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[]; +} + +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition. For + * example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). 
+ */ + path: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. + * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to qux. + * // + * // Another line attached to qux. + * optional double qux = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to qux or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. * / + * /* Block comment attached to + * * grault. 
* / + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leadingComments: string; + trailingComments: string; + leadingDetachedComments: string[]; +} + +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. + */ + annotation: GeneratedCodeInfo_Annotation[]; +} + +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[]; + /** Identifies the filesystem path to the original source .proto. */ + sourceFile: string; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin: number; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified offset. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end: number; +} + +function createBaseFileDescriptorSet(): FileDescriptorSet { + return { file: [] }; +} + +export const FileDescriptorSet = { + encode(message: FileDescriptorSet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.file) { + FileDescriptorProto.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorSet { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.file.push(FileDescriptorProto.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorSet { + return { file: Array.isArray(object?.file) ? object.file.map((e: any) => FileDescriptorProto.fromJSON(e)) : [] }; + }, + + toJSON(message: FileDescriptorSet): unknown { + const obj: any = {}; + if (message.file) { + obj.file = message.file.map((e) => e ? FileDescriptorProto.toJSON(e) : undefined); + } else { + obj.file = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorSet { + const message = createBaseFileDescriptorSet(); + message.file = object.file?.map((e) => FileDescriptorProto.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFileDescriptorProto(): FileDescriptorProto { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + }; +} + +export const FileDescriptorProto = { + encode(message: FileDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.package !== "") { + writer.uint32(18).string(message.package); + } + for (const v of message.dependency) { + writer.uint32(26).string(v!); + } + writer.uint32(82).fork(); + for (const v of message.publicDependency) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(90).fork(); + for (const v of message.weakDependency) { + writer.int32(v); + } + writer.ldelim(); + for (const v of message.messageType) { + DescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of 
message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const v of message.service) { + ServiceDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(58).fork()).ldelim(); + } + if (message.options !== undefined) { + FileOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.sourceCodeInfo !== undefined) { + SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(74).fork()).ldelim(); + } + if (message.syntax !== "") { + writer.uint32(98).string(message.syntax); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFileDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.package = reader.string(); + break; + case 3: + message.dependency.push(reader.string()); + break; + case 10: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.publicDependency.push(reader.int32()); + } + } else { + message.publicDependency.push(reader.int32()); + } + break; + case 11: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.weakDependency.push(reader.int32()); + } + } else { + message.weakDependency.push(reader.int32()); + } + break; + case 4: + message.messageType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.service.push(ServiceDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + 
message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 8: + message.options = FileOptions.decode(reader, reader.uint32()); + break; + case 9: + message.sourceCodeInfo = SourceCodeInfo.decode(reader, reader.uint32()); + break; + case 12: + message.syntax = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + package: isSet(object.package) ? String(object.package) : "", + dependency: Array.isArray(object?.dependency) ? object.dependency.map((e: any) => String(e)) : [], + publicDependency: Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e: any) => Number(e)) + : [], + weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e: any) => Number(e)) : [], + messageType: Array.isArray(object?.messageType) + ? object.messageType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + service: Array.isArray(object?.service) ? object.service.map((e: any) => ServiceDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) ? SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, + syntax: isSet(object.syntax) ? 
String(object.syntax) : "", + }; + }, + + toJSON(message: FileDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.package !== undefined && (obj.package = message.package); + if (message.dependency) { + obj.dependency = message.dependency.map((e) => e); + } else { + obj.dependency = []; + } + if (message.publicDependency) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } else { + obj.publicDependency = []; + } + if (message.weakDependency) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } else { + obj.weakDependency = []; + } + if (message.messageType) { + obj.messageType = message.messageType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.messageType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.service) { + obj.service = message.service.map((e) => e ? ServiceDescriptorProto.toJSON(e) : undefined); + } else { + obj.service = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + message.options !== undefined && (obj.options = message.options ? FileOptions.toJSON(message.options) : undefined); + message.sourceCodeInfo !== undefined + && (obj.sourceCodeInfo = message.sourceCodeInfo ? SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined); + message.syntax !== undefined && (obj.syntax = message.syntax); + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorProto { + const message = createBaseFileDescriptorProto(); + message.name = object.name ?? ""; + message.package = object.package ?? 
""; + message.dependency = object.dependency?.map((e) => e) || []; + message.publicDependency = object.publicDependency?.map((e) => e) || []; + message.weakDependency = object.weakDependency?.map((e) => e) || []; + message.messageType = object.messageType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.service = object.service?.map((e) => ServiceDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? FileOptions.fromPartial(object.options) + : undefined; + message.sourceCodeInfo = (object.sourceCodeInfo !== undefined && object.sourceCodeInfo !== null) + ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) + : undefined; + message.syntax = object.syntax ?? ""; + return message; + }, +}; + +function createBaseDescriptorProto(): DescriptorProto { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + }; +} + +export const DescriptorProto = { + encode(message: DescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.field) { + FieldDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.nestedType) { + DescriptorProto.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.extensionRange) { + DescriptorProto_ExtensionRange.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const 
v of message.oneofDecl) { + OneofDescriptorProto.encode(v!, writer.uint32(66).fork()).ldelim(); + } + if (message.options !== undefined) { + MessageOptions.encode(message.options, writer.uint32(58).fork()).ldelim(); + } + for (const v of message.reservedRange) { + DescriptorProto_ReservedRange.encode(v!, writer.uint32(74).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(82).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.field.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.nestedType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 4: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.extensionRange.push(DescriptorProto_ExtensionRange.decode(reader, reader.uint32())); + break; + case 8: + message.oneofDecl.push(OneofDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + message.options = MessageOptions.decode(reader, reader.uint32()); + break; + case 9: + message.reservedRange.push(DescriptorProto_ReservedRange.decode(reader, reader.uint32())); + break; + case 10: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + field: Array.isArray(object?.field) ? 
object.field.map((e: any) => FieldDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: Array.isArray(object?.nestedType) + ? object.nestedType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + extensionRange: Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e: any) => DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e: any) => OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? MessageOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: DescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.field) { + obj.field = message.field.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.field = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + if (message.nestedType) { + obj.nestedType = message.nestedType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.nestedType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.extensionRange) { + obj.extensionRange = message.extensionRange.map((e) => e ? 
DescriptorProto_ExtensionRange.toJSON(e) : undefined); + } else { + obj.extensionRange = []; + } + if (message.oneofDecl) { + obj.oneofDecl = message.oneofDecl.map((e) => e ? OneofDescriptorProto.toJSON(e) : undefined); + } else { + obj.oneofDecl = []; + } + message.options !== undefined + && (obj.options = message.options ? MessageOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => e ? DescriptorProto_ReservedRange.toJSON(e) : undefined); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): DescriptorProto { + const message = createBaseDescriptorProto(); + message.name = object.name ?? ""; + message.field = object.field?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.nestedType = object.nestedType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.extensionRange = object.extensionRange?.map((e) => DescriptorProto_ExtensionRange.fromPartial(e)) || []; + message.oneofDecl = object.oneofDecl?.map((e) => OneofDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
MessageOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => DescriptorProto_ReservedRange.fromPartial(e)) || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseDescriptorProto_ExtensionRange(): DescriptorProto_ExtensionRange { + return { start: 0, end: 0, options: undefined }; +} + +export const DescriptorProto_ExtensionRange = { + encode(message: DescriptorProto_ExtensionRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + if (message.options !== undefined) { + ExtensionRangeOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ExtensionRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ExtensionRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + case 3: + message.options = ExtensionRangeOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ExtensionRange { + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + options: isSet(object.options) ? 
ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: DescriptorProto_ExtensionRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + message.options !== undefined + && (obj.options = message.options ? ExtensionRangeOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ExtensionRange { + const message = createBaseDescriptorProto_ExtensionRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? ExtensionRangeOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseDescriptorProto_ReservedRange(): DescriptorProto_ReservedRange { + return { start: 0, end: 0 }; +} + +export const DescriptorProto_ReservedRange = { + encode(message: DescriptorProto_ReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? 
Number(object.end) : 0 }; + }, + + toJSON(message: DescriptorProto_ReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ReservedRange { + const message = createBaseDescriptorProto_ReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + return message; + }, +}; + +function createBaseExtensionRangeOptions(): ExtensionRangeOptions { + return { uninterpretedOption: [] }; +} + +export const ExtensionRangeOptions = { + encode(message: ExtensionRangeOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ExtensionRangeOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ExtensionRangeOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} + +export const FieldDescriptorProto = { + encode(message: FieldDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(24).int32(message.number); + } + if (message.label !== 1) { + writer.uint32(32).int32(message.label); + } + if (message.type !== 1) { + writer.uint32(40).int32(message.type); + } + if (message.typeName !== "") { + writer.uint32(50).string(message.typeName); + } + if (message.extendee !== "") { + writer.uint32(18).string(message.extendee); + } + if (message.defaultValue !== "") { + writer.uint32(58).string(message.defaultValue); + } + if (message.oneofIndex !== 0) { + writer.uint32(72).int32(message.oneofIndex); + } + if (message.jsonName !== "") { + writer.uint32(82).string(message.jsonName); + } + if (message.options !== undefined) { + FieldOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.proto3Optional === true) { + writer.uint32(136).bool(message.proto3Optional); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 3: + message.number = reader.int32(); + break; + case 4: + message.label = reader.int32() as any; + break; + case 5: + message.type = reader.int32() as any; + break; + case 6: + message.typeName = reader.string(); + break; + case 2: + message.extendee = reader.string(); + break; + case 7: + message.defaultValue = reader.string(); + break; + case 9: + message.oneofIndex = reader.int32(); + break; + case 10: + message.jsonName = reader.string(); + break; + case 8: + message.options = FieldOptions.decode(reader, reader.uint32()); + break; + case 17: + message.proto3Optional = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) ? String(object.typeName) : "", + extendee: isSet(object.extendee) ? String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "", + oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? String(object.jsonName) : "", + options: isSet(object.options) ? FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) ? 
Boolean(object.proto3Optional) : false, + }; + }, + + toJSON(message: FieldDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label)); + message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type)); + message.typeName !== undefined && (obj.typeName = message.typeName); + message.extendee !== undefined && (obj.extendee = message.extendee); + message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue); + message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex)); + message.jsonName !== undefined && (obj.jsonName = message.jsonName); + message.options !== undefined && (obj.options = message.options ? FieldOptions.toJSON(message.options) : undefined); + message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional); + return obj; + }, + + fromPartial, I>>(object: I): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.label = object.label ?? 1; + message.type = object.type ?? 1; + message.typeName = object.typeName ?? ""; + message.extendee = object.extendee ?? ""; + message.defaultValue = object.defaultValue ?? ""; + message.oneofIndex = object.oneofIndex ?? 0; + message.jsonName = object.jsonName ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? FieldOptions.fromPartial(object.options) + : undefined; + message.proto3Optional = object.proto3Optional ?? 
false; + return message; + }, +}; + +function createBaseOneofDescriptorProto(): OneofDescriptorProto { + return { name: "", options: undefined }; +} + +export const OneofDescriptorProto = { + encode(message: OneofDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.options !== undefined) { + OneofOptions.encode(message.options, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.options = OneofOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + options: isSet(object.options) ? OneofOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: OneofDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.options !== undefined && (obj.options = message.options ? OneofOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto(); + message.name = object.name ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? 
OneofOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseEnumDescriptorProto(): EnumDescriptorProto { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] }; +} + +export const EnumDescriptorProto = { + encode(message: EnumDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.value) { + EnumValueDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + EnumOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.reservedRange) { + EnumDescriptorProto_EnumReservedRange.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(42).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.value.push(EnumValueDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = EnumOptions.decode(reader, reader.uint32()); + break; + case 4: + message.reservedRange.push(EnumDescriptorProto_EnumReservedRange.decode(reader, reader.uint32())); + break; + case 5: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + value: Array.isArray(object?.value) ? 
object.value.map((e: any) => EnumValueDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? EnumOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: EnumDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.value) { + obj.value = message.value.map((e) => e ? EnumValueDescriptorProto.toJSON(e) : undefined); + } else { + obj.value = []; + } + message.options !== undefined && (obj.options = message.options ? EnumOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => + e ? EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined + ); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto(); + message.name = object.name ?? ""; + message.value = object.value?.map((e) => EnumValueDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
EnumOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => EnumDescriptorProto_EnumReservedRange.fromPartial(e)) + || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseEnumDescriptorProto_EnumReservedRange(): EnumDescriptorProto_EnumReservedRange { + return { start: 0, end: 0 }; +} + +export const EnumDescriptorProto_EnumReservedRange = { + encode(message: EnumDescriptorProto_EnumReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto_EnumReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto_EnumReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + + toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +function createBaseEnumValueDescriptorProto(): EnumValueDescriptorProto { + return { name: "", number: 0, options: undefined }; +} + +export const EnumValueDescriptorProto = { + encode(message: EnumValueDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(16).int32(message.number); + } + if (message.options !== undefined) { + EnumValueOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.number = reader.int32(); + break; + case 3: + message.options = EnumValueOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + options: isSet(object.options) ? EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: EnumValueDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.options !== undefined + && (obj.options = message.options ? 
EnumValueOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? EnumValueOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseServiceDescriptorProto(): ServiceDescriptorProto { + return { name: "", method: [], options: undefined }; +} + +export const ServiceDescriptorProto = { + encode(message: ServiceDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.method) { + MethodDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + ServiceOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.method.push(MethodDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = ServiceOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + method: Array.isArray(object?.method) ? object.method.map((e: any) => MethodDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? 
ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: ServiceDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.method) { + obj.method = message.method.map((e) => e ? MethodDescriptorProto.toJSON(e) : undefined); + } else { + obj.method = []; + } + message.options !== undefined + && (obj.options = message.options ? ServiceOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto(); + message.name = object.name ?? ""; + message.method = object.method?.map((e) => MethodDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? ServiceOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseMethodDescriptorProto(): MethodDescriptorProto { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} + +export const MethodDescriptorProto = { + encode(message: MethodDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.inputType !== "") { + writer.uint32(18).string(message.inputType); + } + if (message.outputType !== "") { + writer.uint32(26).string(message.outputType); + } + if (message.options !== undefined) { + MethodOptions.encode(message.options, writer.uint32(34).fork()).ldelim(); + } + if (message.clientStreaming === true) { + writer.uint32(40).bool(message.clientStreaming); + } + if (message.serverStreaming === true) { + writer.uint32(48).bool(message.serverStreaming); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodDescriptorProto { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.inputType = reader.string(); + break; + case 3: + message.outputType = reader.string(); + break; + case 4: + message.options = MethodOptions.decode(reader, reader.uint32()); + break; + case 5: + message.clientStreaming = reader.bool(); + break; + case 6: + message.serverStreaming = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + inputType: isSet(object.inputType) ? String(object.inputType) : "", + outputType: isSet(object.outputType) ? String(object.outputType) : "", + options: isSet(object.options) ? MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false, + serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false, + }; + }, + + toJSON(message: MethodDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.inputType !== undefined && (obj.inputType = message.inputType); + message.outputType !== undefined && (obj.outputType = message.outputType); + message.options !== undefined + && (obj.options = message.options ? MethodOptions.toJSON(message.options) : undefined); + message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming); + message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming); + return obj; + }, + + fromPartial, I>>(object: I): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto(); + message.name = object.name ?? 
""; + message.inputType = object.inputType ?? ""; + message.outputType = object.outputType ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? MethodOptions.fromPartial(object.options) + : undefined; + message.clientStreaming = object.clientStreaming ?? false; + message.serverStreaming = object.serverStreaming ?? false; + return message; + }, +}; + +function createBaseFileOptions(): FileOptions { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + uninterpretedOption: [], + }; +} + +export const FileOptions = { + encode(message: FileOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.javaPackage !== "") { + writer.uint32(10).string(message.javaPackage); + } + if (message.javaOuterClassname !== "") { + writer.uint32(66).string(message.javaOuterClassname); + } + if (message.javaMultipleFiles === true) { + writer.uint32(80).bool(message.javaMultipleFiles); + } + if (message.javaGenerateEqualsAndHash === true) { + writer.uint32(160).bool(message.javaGenerateEqualsAndHash); + } + if (message.javaStringCheckUtf8 === true) { + writer.uint32(216).bool(message.javaStringCheckUtf8); + } + if (message.optimizeFor !== 1) { + writer.uint32(72).int32(message.optimizeFor); + } + if (message.goPackage !== "") { + writer.uint32(90).string(message.goPackage); + } + if (message.ccGenericServices === true) { + writer.uint32(128).bool(message.ccGenericServices); + } + if (message.javaGenericServices === true) { + writer.uint32(136).bool(message.javaGenericServices); + } + if 
(message.pyGenericServices === true) { + writer.uint32(144).bool(message.pyGenericServices); + } + if (message.phpGenericServices === true) { + writer.uint32(336).bool(message.phpGenericServices); + } + if (message.deprecated === true) { + writer.uint32(184).bool(message.deprecated); + } + if (message.ccEnableArenas === true) { + writer.uint32(248).bool(message.ccEnableArenas); + } + if (message.objcClassPrefix !== "") { + writer.uint32(290).string(message.objcClassPrefix); + } + if (message.csharpNamespace !== "") { + writer.uint32(298).string(message.csharpNamespace); + } + if (message.swiftPrefix !== "") { + writer.uint32(314).string(message.swiftPrefix); + } + if (message.phpClassPrefix !== "") { + writer.uint32(322).string(message.phpClassPrefix); + } + if (message.phpNamespace !== "") { + writer.uint32(330).string(message.phpNamespace); + } + if (message.phpMetadataNamespace !== "") { + writer.uint32(354).string(message.phpMetadataNamespace); + } + if (message.rubyPackage !== "") { + writer.uint32(362).string(message.rubyPackage); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.javaPackage = reader.string(); + break; + case 8: + message.javaOuterClassname = reader.string(); + break; + case 10: + message.javaMultipleFiles = reader.bool(); + break; + case 20: + message.javaGenerateEqualsAndHash = reader.bool(); + break; + case 27: + message.javaStringCheckUtf8 = reader.bool(); + break; + case 9: + message.optimizeFor = reader.int32() as any; + break; + case 11: + message.goPackage = reader.string(); + break; + case 16: + message.ccGenericServices = reader.bool(); + break; + case 17: + message.javaGenericServices = reader.bool(); + break; + case 18: + message.pyGenericServices = reader.bool(); + break; + case 42: + message.phpGenericServices = reader.bool(); + break; + case 23: + message.deprecated = reader.bool(); + break; + case 31: + message.ccEnableArenas = reader.bool(); + break; + case 36: + message.objcClassPrefix = reader.string(); + break; + case 37: + message.csharpNamespace = reader.string(); + break; + case 39: + message.swiftPrefix = reader.string(); + break; + case 40: + message.phpClassPrefix = reader.string(); + break; + case 41: + message.phpNamespace = reader.string(); + break; + case 44: + message.phpMetadataNamespace = reader.string(); + break; + case 45: + message.rubyPackage = reader.string(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileOptions { + return { + javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "", + javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) ? 
Boolean(object.javaMultipleFiles) : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false, + optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, + goPackage: isSet(object.goPackage) ? String(object.goPackage) : "", + ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false, + javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false, + pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false, + phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false, + objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "", + csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "", + swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "", + phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "", + phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "", + rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "", + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FileOptions): unknown { + const obj: any = {}; + message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage); + message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname); + message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles); + message.javaGenerateEqualsAndHash !== undefined + && (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash); + message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8); + message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor)); + message.goPackage !== undefined && (obj.goPackage = message.goPackage); + message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices); + message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices); + message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices); + message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas); + message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix); + message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace); + message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix); + message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix); + message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace); + message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace); + message.rubyPackage !== undefined 
&& (obj.rubyPackage = message.rubyPackage); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileOptions { + const message = createBaseFileOptions(); + message.javaPackage = object.javaPackage ?? ""; + message.javaOuterClassname = object.javaOuterClassname ?? ""; + message.javaMultipleFiles = object.javaMultipleFiles ?? false; + message.javaGenerateEqualsAndHash = object.javaGenerateEqualsAndHash ?? false; + message.javaStringCheckUtf8 = object.javaStringCheckUtf8 ?? false; + message.optimizeFor = object.optimizeFor ?? 1; + message.goPackage = object.goPackage ?? ""; + message.ccGenericServices = object.ccGenericServices ?? false; + message.javaGenericServices = object.javaGenericServices ?? false; + message.pyGenericServices = object.pyGenericServices ?? false; + message.phpGenericServices = object.phpGenericServices ?? false; + message.deprecated = object.deprecated ?? false; + message.ccEnableArenas = object.ccEnableArenas ?? false; + message.objcClassPrefix = object.objcClassPrefix ?? ""; + message.csharpNamespace = object.csharpNamespace ?? ""; + message.swiftPrefix = object.swiftPrefix ?? ""; + message.phpClassPrefix = object.phpClassPrefix ?? ""; + message.phpNamespace = object.phpNamespace ?? ""; + message.phpMetadataNamespace = object.phpMetadataNamespace ?? ""; + message.rubyPackage = object.rubyPackage ?? 
""; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMessageOptions(): MessageOptions { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + uninterpretedOption: [], + }; +} + +export const MessageOptions = { + encode(message: MessageOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.messageSetWireFormat === true) { + writer.uint32(8).bool(message.messageSetWireFormat); + } + if (message.noStandardDescriptorAccessor === true) { + writer.uint32(16).bool(message.noStandardDescriptorAccessor); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.mapEntry === true) { + writer.uint32(56).bool(message.mapEntry); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MessageOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMessageOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.messageSetWireFormat = reader.bool(); + break; + case 2: + message.noStandardDescriptorAccessor = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 7: + message.mapEntry = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MessageOptions { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) ? 
Boolean(object.messageSetWireFormat) : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MessageOptions): unknown { + const obj: any = {}; + message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat); + message.noStandardDescriptorAccessor !== undefined + && (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MessageOptions { + const message = createBaseMessageOptions(); + message.messageSetWireFormat = object.messageSetWireFormat ?? false; + message.noStandardDescriptorAccessor = object.noStandardDescriptorAccessor ?? false; + message.deprecated = object.deprecated ?? false; + message.mapEntry = object.mapEntry ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldOptions(): FieldOptions { + return { ctype: 0, packed: false, jstype: 0, lazy: false, deprecated: false, weak: false, uninterpretedOption: [] }; +} + +export const FieldOptions = { + encode(message: FieldOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ctype !== 0) { + writer.uint32(8).int32(message.ctype); + } + if (message.packed === true) { + writer.uint32(16).bool(message.packed); + } + if (message.jstype !== 0) { + writer.uint32(48).int32(message.jstype); + } + if (message.lazy === true) { + writer.uint32(40).bool(message.lazy); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.weak === true) { + writer.uint32(80).bool(message.weak); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.ctype = reader.int32() as any; + break; + case 2: + message.packed = reader.bool(); + break; + case 6: + message.jstype = reader.int32() as any; + break; + case 5: + message.lazy = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 10: + message.weak = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldOptions { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? Boolean(object.lazy) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? Boolean(object.weak) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FieldOptions): unknown { + const obj: any = {}; + message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype)); + message.packed !== undefined && (obj.packed = message.packed); + message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype)); + message.lazy !== undefined && (obj.lazy = message.lazy); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.weak !== undefined && (obj.weak = message.weak); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FieldOptions { + const message = createBaseFieldOptions(); + message.ctype = object.ctype ?? 0; + message.packed = object.packed ?? false; + message.jstype = object.jstype ?? 0; + message.lazy = object.lazy ?? false; + message.deprecated = object.deprecated ?? false; + message.weak = object.weak ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseOneofOptions(): OneofOptions { + return { uninterpretedOption: [] }; +} + +export const OneofOptions = { + encode(message: OneofOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: OneofOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): OneofOptions { + const message = createBaseOneofOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumOptions(): EnumOptions { + return { allowAlias: false, deprecated: false, uninterpretedOption: [] }; +} + +export const EnumOptions = { + encode(message: EnumOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.allowAlias === true) { + writer.uint32(16).bool(message.allowAlias); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.allowAlias = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumOptions { + return { + allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumOptions): unknown { + const obj: any = {}; + message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumOptions { + const message = createBaseEnumOptions(); + message.allowAlias = object.allowAlias ?? false; + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumValueOptions(): EnumValueOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const EnumValueOptions = { + encode(message: EnumValueOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(8).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumValueOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumValueOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumValueOptions { + const message = createBaseEnumValueOptions(); + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseServiceOptions(): ServiceOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const ServiceOptions = { + encode(message: ServiceOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceOptions { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ServiceOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ServiceOptions { + const message = createBaseServiceOptions(); + message.deprecated = object.deprecated ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMethodOptions(): MethodOptions { + return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] }; +} + +export const MethodOptions = { + encode(message: MethodOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + if (message.idempotencyLevel !== 0) { + writer.uint32(272).int32(message.idempotencyLevel); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 34: + message.idempotencyLevel = reader.int32() as any; + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : 0, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MethodOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.idempotencyLevel !== undefined + && (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel)); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MethodOptions { + const message = createBaseMethodOptions(); + message.deprecated = object.deprecated ?? false; + message.idempotencyLevel = object.idempotencyLevel ?? 0; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseUninterpretedOption(): UninterpretedOption { + return { + name: [], + identifierValue: "", + positiveIntValue: 0, + negativeIntValue: 0, + doubleValue: 0, + stringValue: new Uint8Array(), + aggregateValue: "", + }; +} + +export const UninterpretedOption = { + encode(message: UninterpretedOption, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.name) { + UninterpretedOption_NamePart.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.identifierValue !== "") { + writer.uint32(26).string(message.identifierValue); + } + if (message.positiveIntValue !== 0) { + writer.uint32(32).uint64(message.positiveIntValue); + } + if (message.negativeIntValue !== 0) { + writer.uint32(40).int64(message.negativeIntValue); + } + if (message.doubleValue !== 0) { + writer.uint32(49).double(message.doubleValue); + } + if (message.stringValue.length !== 0) { + writer.uint32(58).bytes(message.stringValue); + } + if (message.aggregateValue !== "") { + writer.uint32(66).string(message.aggregateValue); + } + 
return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.name.push(UninterpretedOption_NamePart.decode(reader, reader.uint32())); + break; + case 3: + message.identifierValue = reader.string(); + break; + case 4: + message.positiveIntValue = longToNumber(reader.uint64() as Long); + break; + case 5: + message.negativeIntValue = longToNumber(reader.int64() as Long); + break; + case 6: + message.doubleValue = reader.double(); + break; + case 7: + message.stringValue = reader.bytes(); + break; + case 8: + message.aggregateValue = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption { + return { + name: Array.isArray(object?.name) ? object.name.map((e: any) => UninterpretedOption_NamePart.fromJSON(e)) : [], + identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "", + positiveIntValue: isSet(object.positiveIntValue) ? Number(object.positiveIntValue) : 0, + negativeIntValue: isSet(object.negativeIntValue) ? Number(object.negativeIntValue) : 0, + doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) ? bytesFromBase64(object.stringValue) : new Uint8Array(), + aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "", + }; + }, + + toJSON(message: UninterpretedOption): unknown { + const obj: any = {}; + if (message.name) { + obj.name = message.name.map((e) => e ? 
UninterpretedOption_NamePart.toJSON(e) : undefined); + } else { + obj.name = []; + } + message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue); + message.positiveIntValue !== undefined && (obj.positiveIntValue = Math.round(message.positiveIntValue)); + message.negativeIntValue !== undefined && (obj.negativeIntValue = Math.round(message.negativeIntValue)); + message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue); + message.stringValue !== undefined + && (obj.stringValue = base64FromBytes( + message.stringValue !== undefined ? message.stringValue : new Uint8Array(), + )); + message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption { + const message = createBaseUninterpretedOption(); + message.name = object.name?.map((e) => UninterpretedOption_NamePart.fromPartial(e)) || []; + message.identifierValue = object.identifierValue ?? ""; + message.positiveIntValue = object.positiveIntValue ?? 0; + message.negativeIntValue = object.negativeIntValue ?? 0; + message.doubleValue = object.doubleValue ?? 0; + message.stringValue = object.stringValue ?? new Uint8Array(); + message.aggregateValue = object.aggregateValue ?? ""; + return message; + }, +}; + +function createBaseUninterpretedOption_NamePart(): UninterpretedOption_NamePart { + return { namePart: "", isExtension: false }; +} + +export const UninterpretedOption_NamePart = { + encode(message: UninterpretedOption_NamePart, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.namePart !== "") { + writer.uint32(10).string(message.namePart); + } + if (message.isExtension === true) { + writer.uint32(16).bool(message.isExtension); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption_NamePart { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseUninterpretedOption_NamePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.namePart = reader.string(); + break; + case 2: + message.isExtension = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption_NamePart { + return { + namePart: isSet(object.namePart) ? String(object.namePart) : "", + isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false, + }; + }, + + toJSON(message: UninterpretedOption_NamePart): unknown { + const obj: any = {}; + message.namePart !== undefined && (obj.namePart = message.namePart); + message.isExtension !== undefined && (obj.isExtension = message.isExtension); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart(); + message.namePart = object.namePart ?? ""; + message.isExtension = object.isExtension ?? false; + return message; + }, +}; + +function createBaseSourceCodeInfo(): SourceCodeInfo { + return { location: [] }; +} + +export const SourceCodeInfo = { + encode(message: SourceCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.location) { + SourceCodeInfo_Location.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.location.push(SourceCodeInfo_Location.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo { + return { + location: Array.isArray(object?.location) + ? object.location.map((e: any) => SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo): unknown { + const obj: any = {}; + if (message.location) { + obj.location = message.location.map((e) => e ? SourceCodeInfo_Location.toJSON(e) : undefined); + } else { + obj.location = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo { + const message = createBaseSourceCodeInfo(); + message.location = object.location?.map((e) => SourceCodeInfo_Location.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSourceCodeInfo_Location(): SourceCodeInfo_Location { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} + +export const SourceCodeInfo_Location = { + encode(message: SourceCodeInfo_Location, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(18).fork(); + for (const v of message.span) { + writer.int32(v); + } + writer.ldelim(); + if (message.leadingComments !== "") { + writer.uint32(26).string(message.leadingComments); + } + if (message.trailingComments !== "") { + writer.uint32(34).string(message.trailingComments); + } + for (const v of message.leadingDetachedComments) { + writer.uint32(50).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo_Location { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo_Location(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.span.push(reader.int32()); + } + } else { + message.span.push(reader.int32()); + } + break; + case 3: + message.leadingComments = reader.string(); + break; + case 4: + message.trailingComments = reader.string(); + break; + case 6: + message.leadingDetachedComments.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo_Location { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + span: Array.isArray(object?.span) ? object.span.map((e: any) => Number(e)) : [], + leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "", + trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "", + leadingDetachedComments: Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e: any) => String(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo_Location): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + if (message.span) { + obj.span = message.span.map((e) => Math.round(e)); + } else { + obj.span = []; + } + message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments); + message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments); + if (message.leadingDetachedComments) { + obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e); + } else { + obj.leadingDetachedComments = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location(); + message.path = object.path?.map((e) => e) || []; + message.span = object.span?.map((e) => e) || []; + message.leadingComments = object.leadingComments ?? ""; + message.trailingComments = object.trailingComments ?? ""; + message.leadingDetachedComments = object.leadingDetachedComments?.map((e) => e) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo(): GeneratedCodeInfo { + return { annotation: [] }; +} + +export const GeneratedCodeInfo = { + encode(message: GeneratedCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.annotation) { + GeneratedCodeInfo_Annotation.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.annotation.push(GeneratedCodeInfo_Annotation.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo { + return { + annotation: Array.isArray(object?.annotation) + ? object.annotation.map((e: any) => GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GeneratedCodeInfo): unknown { + const obj: any = {}; + if (message.annotation) { + obj.annotation = message.annotation.map((e) => e ? GeneratedCodeInfo_Annotation.toJSON(e) : undefined); + } else { + obj.annotation = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo(); + message.annotation = object.annotation?.map((e) => GeneratedCodeInfo_Annotation.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo_Annotation(): GeneratedCodeInfo_Annotation { + return { path: [], sourceFile: "", begin: 0, end: 0 }; +} + +export const GeneratedCodeInfo_Annotation = { + encode(message: GeneratedCodeInfo_Annotation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + if (message.sourceFile !== "") { + writer.uint32(18).string(message.sourceFile); + } + if (message.begin !== 0) { + writer.uint32(24).int32(message.begin); + } + if (message.end !== 0) { + writer.uint32(32).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo_Annotation { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo_Annotation(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + message.sourceFile = reader.string(); + break; + case 3: + message.begin = reader.int32(); + break; + case 4: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo_Annotation { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "", + begin: isSet(object.begin) ? Number(object.begin) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + }; + }, + + toJSON(message: GeneratedCodeInfo_Annotation): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile); + message.begin !== undefined && (obj.begin = Math.round(message.begin)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation(); + message.path = object.path?.map((e) => e) || []; + message.sourceFile = object.sourceFile ?? ""; + message.begin = object.begin ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/description.ts b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/description.ts new file mode 100644 index 000000000..43ed2f6f2 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/description.ts @@ -0,0 +1,106 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.validator"; + +export interface Description { + /** a human-readable name for the validator. */ + moniker: string; + /** optional identity signature. */ + identity: string; + /** optional website link. */ + website: string; + /** optional details. 
*/ + details: string; +} + +function createBaseDescription(): Description { + return { moniker: "", identity: "", website: "", details: "" }; +} + +export const Description = { + encode(message: Description, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.moniker !== "") { + writer.uint32(10).string(message.moniker); + } + if (message.identity !== "") { + writer.uint32(18).string(message.identity); + } + if (message.website !== "") { + writer.uint32(26).string(message.website); + } + if (message.details !== "") { + writer.uint32(34).string(message.details); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Description { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescription(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.moniker = reader.string(); + break; + case 2: + message.identity = reader.string(); + break; + case 3: + message.website = reader.string(); + break; + case 4: + message.details = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Description { + return { + moniker: isSet(object.moniker) ? String(object.moniker) : "", + identity: isSet(object.identity) ? String(object.identity) : "", + website: isSet(object.website) ? String(object.website) : "", + details: isSet(object.details) ? 
String(object.details) : "", + }; + }, + + toJSON(message: Description): unknown { + const obj: any = {}; + message.moniker !== undefined && (obj.moniker = message.moniker); + message.identity !== undefined && (obj.identity = message.identity); + message.website !== undefined && (obj.website = message.website); + message.details !== undefined && (obj.details = message.details); + return obj; + }, + + fromPartial, I>>(object: I): Description { + const message = createBaseDescription(); + message.moniker = object.moniker ?? ""; + message.identity = object.identity ?? ""; + message.website = object.website ?? ""; + message.details = object.details ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/disabled_validator.ts b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/disabled_validator.ts new file mode 100644 index 000000000..fe55df929 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/disabled_validator.ts @@ -0,0 +1,121 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Grant } from "./grant"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.validator"; + +export interface DisabledValidator { + address: string; + creator: string; + approvals: Grant[]; + disabledByNodeAdmin: boolean; + rejects: Grant[]; +} + +function createBaseDisabledValidator(): DisabledValidator { + return { address: "", creator: "", approvals: [], disabledByNodeAdmin: false, rejects: [] }; +} + +export const DisabledValidator = { + encode(message: DisabledValidator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + if (message.creator !== "") { + writer.uint32(18).string(message.creator); + } + for (const v of message.approvals) { + Grant.encode(v!, writer.uint32(26).fork()).ldelim(); + } + if (message.disabledByNodeAdmin === true) { + writer.uint32(32).bool(message.disabledByNodeAdmin); + } + for (const v of message.rejects) { + Grant.encode(v!, writer.uint32(42).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DisabledValidator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseDisabledValidator(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + case 2: + message.creator = reader.string(); + break; + case 3: + message.approvals.push(Grant.decode(reader, reader.uint32())); + break; + case 4: + message.disabledByNodeAdmin = reader.bool(); + break; + case 5: + message.rejects.push(Grant.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DisabledValidator { + return { + address: isSet(object.address) ? String(object.address) : "", + creator: isSet(object.creator) ? String(object.creator) : "", + approvals: Array.isArray(object?.approvals) ? object.approvals.map((e: any) => Grant.fromJSON(e)) : [], + disabledByNodeAdmin: isSet(object.disabledByNodeAdmin) ? Boolean(object.disabledByNodeAdmin) : false, + rejects: Array.isArray(object?.rejects) ? object.rejects.map((e: any) => Grant.fromJSON(e)) : [], + }; + }, + + toJSON(message: DisabledValidator): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + message.creator !== undefined && (obj.creator = message.creator); + if (message.approvals) { + obj.approvals = message.approvals.map((e) => e ? Grant.toJSON(e) : undefined); + } else { + obj.approvals = []; + } + message.disabledByNodeAdmin !== undefined && (obj.disabledByNodeAdmin = message.disabledByNodeAdmin); + if (message.rejects) { + obj.rejects = message.rejects.map((e) => e ? Grant.toJSON(e) : undefined); + } else { + obj.rejects = []; + } + return obj; + }, + + fromPartial, I>>(object: I): DisabledValidator { + const message = createBaseDisabledValidator(); + message.address = object.address ?? ""; + message.creator = object.creator ?? 
""; + message.approvals = object.approvals?.map((e) => Grant.fromPartial(e)) || []; + message.disabledByNodeAdmin = object.disabledByNodeAdmin ?? false; + message.rejects = object.rejects?.map((e) => Grant.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/genesis.ts b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/genesis.ts new file mode 100644 index 000000000..4a8779355 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/genesis.ts @@ -0,0 +1,159 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { DisabledValidator } from "./disabled_validator"; +import { LastValidatorPower } from "./last_validator_power"; +import { ProposedDisableValidator } from "./proposed_disable_validator"; +import { RejectedDisableValidator } from "./rejected_validator"; +import { Validator } from "./validator"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.validator"; + +/** GenesisState defines the validator module's genesis state. 
*/ +export interface GenesisState { + validatorList: Validator[]; + lastValidatorPowerList: LastValidatorPower[]; + proposedDisableValidatorList: ProposedDisableValidator[]; + disabledValidatorList: DisabledValidator[]; + /** this line is used by starport scaffolding # genesis/proto/state */ + rejectedValidatorList: RejectedDisableValidator[]; +} + +function createBaseGenesisState(): GenesisState { + return { + validatorList: [], + lastValidatorPowerList: [], + proposedDisableValidatorList: [], + disabledValidatorList: [], + rejectedValidatorList: [], + }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.validatorList) { + Validator.encode(v!, writer.uint32(10).fork()).ldelim(); + } + for (const v of message.lastValidatorPowerList) { + LastValidatorPower.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.proposedDisableValidatorList) { + ProposedDisableValidator.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.disabledValidatorList) { + DisabledValidator.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.rejectedValidatorList) { + RejectedDisableValidator.encode(v!, writer.uint32(42).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGenesisState(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.validatorList.push(Validator.decode(reader, reader.uint32())); + break; + case 2: + message.lastValidatorPowerList.push(LastValidatorPower.decode(reader, reader.uint32())); + break; + case 3: + message.proposedDisableValidatorList.push(ProposedDisableValidator.decode(reader, reader.uint32())); + break; + case 4: + message.disabledValidatorList.push(DisabledValidator.decode(reader, reader.uint32())); + break; + case 5: + message.rejectedValidatorList.push(RejectedDisableValidator.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GenesisState { + return { + validatorList: Array.isArray(object?.validatorList) + ? object.validatorList.map((e: any) => Validator.fromJSON(e)) + : [], + lastValidatorPowerList: Array.isArray(object?.lastValidatorPowerList) + ? object.lastValidatorPowerList.map((e: any) => LastValidatorPower.fromJSON(e)) + : [], + proposedDisableValidatorList: Array.isArray(object?.proposedDisableValidatorList) + ? object.proposedDisableValidatorList.map((e: any) => ProposedDisableValidator.fromJSON(e)) + : [], + disabledValidatorList: Array.isArray(object?.disabledValidatorList) + ? object.disabledValidatorList.map((e: any) => DisabledValidator.fromJSON(e)) + : [], + rejectedValidatorList: Array.isArray(object?.rejectedValidatorList) + ? object.rejectedValidatorList.map((e: any) => RejectedDisableValidator.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GenesisState): unknown { + const obj: any = {}; + if (message.validatorList) { + obj.validatorList = message.validatorList.map((e) => e ? Validator.toJSON(e) : undefined); + } else { + obj.validatorList = []; + } + if (message.lastValidatorPowerList) { + obj.lastValidatorPowerList = message.lastValidatorPowerList.map((e) => + e ? 
LastValidatorPower.toJSON(e) : undefined + ); + } else { + obj.lastValidatorPowerList = []; + } + if (message.proposedDisableValidatorList) { + obj.proposedDisableValidatorList = message.proposedDisableValidatorList.map((e) => + e ? ProposedDisableValidator.toJSON(e) : undefined + ); + } else { + obj.proposedDisableValidatorList = []; + } + if (message.disabledValidatorList) { + obj.disabledValidatorList = message.disabledValidatorList.map((e) => e ? DisabledValidator.toJSON(e) : undefined); + } else { + obj.disabledValidatorList = []; + } + if (message.rejectedValidatorList) { + obj.rejectedValidatorList = message.rejectedValidatorList.map((e) => + e ? RejectedDisableValidator.toJSON(e) : undefined + ); + } else { + obj.rejectedValidatorList = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GenesisState { + const message = createBaseGenesisState(); + message.validatorList = object.validatorList?.map((e) => Validator.fromPartial(e)) || []; + message.lastValidatorPowerList = object.lastValidatorPowerList?.map((e) => LastValidatorPower.fromPartial(e)) || []; + message.proposedDisableValidatorList = + object.proposedDisableValidatorList?.map((e) => ProposedDisableValidator.fromPartial(e)) || []; + message.disabledValidatorList = object.disabledValidatorList?.map((e) => DisabledValidator.fromPartial(e)) || []; + message.rejectedValidatorList = object.rejectedValidatorList?.map((e) => RejectedDisableValidator.fromPartial(e)) + || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/grant.ts b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/grant.ts new file mode 100644 index 000000000..f6db5569f --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/grant.ts @@ -0,0 +1,125 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.validator"; + +export interface Grant { + address: string; + /** number of nanoseconds elapsed since January 1, 1970 UTC */ + time: number; + info: string; +} + +function createBaseGrant(): Grant { + return { address: "", time: 0, info: "" }; +} + +export const Grant = { + encode(message: Grant, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + if (message.time !== 0) { + writer.uint32(16).int64(message.time); + } + if (message.info !== "") { + writer.uint32(26).string(message.info); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Grant { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGrant(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + case 2: + message.time = longToNumber(reader.int64() as Long); + break; + case 3: + message.info = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Grant { + return { + address: isSet(object.address) ? String(object.address) : "", + time: isSet(object.time) ? Number(object.time) : 0, + info: isSet(object.info) ? String(object.info) : "", + }; + }, + + toJSON(message: Grant): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + message.time !== undefined && (obj.time = Math.round(message.time)); + message.info !== undefined && (obj.info = message.info); + return obj; + }, + + fromPartial, I>>(object: I): Grant { + const message = createBaseGrant(); + message.address = object.address ?? ""; + message.time = object.time ?? 0; + message.info = object.info ?? ""; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/last_validator_power.ts b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/last_validator_power.ts new file mode 100644 index 000000000..c55ba5785 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/last_validator_power.ts @@ -0,0 +1,82 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.validator"; + +export interface LastValidatorPower { + owner: string; + power: number; +} + +function createBaseLastValidatorPower(): LastValidatorPower { + return { owner: "", power: 0 }; +} + +export const LastValidatorPower = { + encode(message: LastValidatorPower, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.owner !== "") { + writer.uint32(10).string(message.owner); + } + if (message.power !== 0) { + writer.uint32(16).int32(message.power); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): LastValidatorPower { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseLastValidatorPower(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.owner = reader.string(); + break; + case 2: + message.power = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): LastValidatorPower { + return { + owner: isSet(object.owner) ? String(object.owner) : "", + power: isSet(object.power) ? Number(object.power) : 0, + }; + }, + + toJSON(message: LastValidatorPower): unknown { + const obj: any = {}; + message.owner !== undefined && (obj.owner = message.owner); + message.power !== undefined && (obj.power = Math.round(message.power)); + return obj; + }, + + fromPartial, I>>(object: I): LastValidatorPower { + const message = createBaseLastValidatorPower(); + message.owner = object.owner ?? ""; + message.power = object.power ?? 0; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/proposed_disable_validator.ts b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/proposed_disable_validator.ts new file mode 100644 index 000000000..c0ec5c1cb --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/proposed_disable_validator.ts @@ -0,0 +1,111 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Grant } from "./grant"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.validator"; + +export interface ProposedDisableValidator { + address: string; + creator: string; + approvals: Grant[]; + rejects: Grant[]; +} + +function createBaseProposedDisableValidator(): ProposedDisableValidator { + return { address: "", creator: "", approvals: [], rejects: [] }; +} + +export const ProposedDisableValidator = { + encode(message: ProposedDisableValidator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + if (message.creator !== "") { + writer.uint32(18).string(message.creator); + } + for (const v of message.approvals) { + Grant.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.rejects) { + Grant.encode(v!, writer.uint32(34).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ProposedDisableValidator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseProposedDisableValidator(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + case 2: + message.creator = reader.string(); + break; + case 3: + message.approvals.push(Grant.decode(reader, reader.uint32())); + break; + case 4: + message.rejects.push(Grant.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ProposedDisableValidator { + return { + address: isSet(object.address) ? String(object.address) : "", + creator: isSet(object.creator) ? String(object.creator) : "", + approvals: Array.isArray(object?.approvals) ? object.approvals.map((e: any) => Grant.fromJSON(e)) : [], + rejects: Array.isArray(object?.rejects) ? object.rejects.map((e: any) => Grant.fromJSON(e)) : [], + }; + }, + + toJSON(message: ProposedDisableValidator): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + message.creator !== undefined && (obj.creator = message.creator); + if (message.approvals) { + obj.approvals = message.approvals.map((e) => e ? Grant.toJSON(e) : undefined); + } else { + obj.approvals = []; + } + if (message.rejects) { + obj.rejects = message.rejects.map((e) => e ? Grant.toJSON(e) : undefined); + } else { + obj.rejects = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ProposedDisableValidator { + const message = createBaseProposedDisableValidator(); + message.address = object.address ?? ""; + message.creator = object.creator ?? ""; + message.approvals = object.approvals?.map((e) => Grant.fromPartial(e)) || []; + message.rejects = object.rejects?.map((e) => Grant.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? 
Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/query.ts b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/query.ts new file mode 100644 index 000000000..cd3adfd70 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/query.ts @@ -0,0 +1,1390 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { PageRequest, PageResponse } from "../../../cosmos/base/query/v1beta1/pagination"; +import { DisabledValidator } from "./disabled_validator"; +import { LastValidatorPower } from "./last_validator_power"; +import { ProposedDisableValidator } from "./proposed_disable_validator"; +import { RejectedDisableValidator } from "./rejected_validator"; +import { Validator } from "./validator"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.validator"; + +export interface QueryGetValidatorRequest { + owner: string; +} + +export interface QueryGetValidatorResponse { + validator: Validator | undefined; +} + +export interface QueryAllValidatorRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllValidatorResponse { + validator: Validator[]; + pagination: PageResponse | undefined; +} + +export interface QueryGetLastValidatorPowerRequest { + owner: string; +} + +export interface QueryGetLastValidatorPowerResponse { + lastValidatorPower: LastValidatorPower | undefined; +} + +export interface 
QueryAllLastValidatorPowerRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllLastValidatorPowerResponse { + lastValidatorPower: LastValidatorPower[]; + pagination: PageResponse | undefined; +} + +export interface QueryGetProposedDisableValidatorRequest { + address: string; +} + +export interface QueryGetProposedDisableValidatorResponse { + proposedDisableValidator: ProposedDisableValidator | undefined; +} + +export interface QueryAllProposedDisableValidatorRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllProposedDisableValidatorResponse { + proposedDisableValidator: ProposedDisableValidator[]; + pagination: PageResponse | undefined; +} + +export interface QueryGetDisabledValidatorRequest { + address: string; +} + +export interface QueryGetDisabledValidatorResponse { + disabledValidator: DisabledValidator | undefined; +} + +export interface QueryAllDisabledValidatorRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllDisabledValidatorResponse { + disabledValidator: DisabledValidator[]; + pagination: PageResponse | undefined; +} + +export interface QueryGetRejectedDisableValidatorRequest { + owner: string; +} + +export interface QueryGetRejectedDisableValidatorResponse { + rejectedValidator: RejectedDisableValidator | undefined; +} + +export interface QueryAllRejectedDisableValidatorRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllRejectedDisableValidatorResponse { + rejectedValidator: RejectedDisableValidator[]; + pagination: PageResponse | undefined; +} + +function createBaseQueryGetValidatorRequest(): QueryGetValidatorRequest { + return { owner: "" }; +} + +export const QueryGetValidatorRequest = { + encode(message: QueryGetValidatorRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.owner !== "") { + writer.uint32(10).string(message.owner); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: 
number): QueryGetValidatorRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetValidatorRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.owner = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetValidatorRequest { + return { owner: isSet(object.owner) ? String(object.owner) : "" }; + }, + + toJSON(message: QueryGetValidatorRequest): unknown { + const obj: any = {}; + message.owner !== undefined && (obj.owner = message.owner); + return obj; + }, + + fromPartial, I>>(object: I): QueryGetValidatorRequest { + const message = createBaseQueryGetValidatorRequest(); + message.owner = object.owner ?? ""; + return message; + }, +}; + +function createBaseQueryGetValidatorResponse(): QueryGetValidatorResponse { + return { validator: undefined }; +} + +export const QueryGetValidatorResponse = { + encode(message: QueryGetValidatorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.validator !== undefined) { + Validator.encode(message.validator, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetValidatorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetValidatorResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.validator = Validator.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetValidatorResponse { + return { validator: isSet(object.validator) ? 
Validator.fromJSON(object.validator) : undefined }; + }, + + toJSON(message: QueryGetValidatorResponse): unknown { + const obj: any = {}; + message.validator !== undefined + && (obj.validator = message.validator ? Validator.toJSON(message.validator) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryGetValidatorResponse { + const message = createBaseQueryGetValidatorResponse(); + message.validator = (object.validator !== undefined && object.validator !== null) + ? Validator.fromPartial(object.validator) + : undefined; + return message; + }, +}; + +function createBaseQueryAllValidatorRequest(): QueryAllValidatorRequest { + return { pagination: undefined }; +} + +export const QueryAllValidatorRequest = { + encode(message: QueryAllValidatorRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllValidatorRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllValidatorRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllValidatorRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllValidatorRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? 
PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryAllValidatorRequest { + const message = createBaseQueryAllValidatorRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllValidatorResponse(): QueryAllValidatorResponse { + return { validator: [], pagination: undefined }; +} + +export const QueryAllValidatorResponse = { + encode(message: QueryAllValidatorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.validator) { + Validator.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllValidatorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllValidatorResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.validator.push(Validator.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllValidatorResponse { + return { + validator: Array.isArray(object?.validator) ? object.validator.map((e: any) => Validator.fromJSON(e)) : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllValidatorResponse): unknown { + const obj: any = {}; + if (message.validator) { + obj.validator = message.validator.map((e) => e ? 
Validator.toJSON(e) : undefined); + } else { + obj.validator = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryAllValidatorResponse { + const message = createBaseQueryAllValidatorResponse(); + message.validator = object.validator?.map((e) => Validator.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetLastValidatorPowerRequest(): QueryGetLastValidatorPowerRequest { + return { owner: "" }; +} + +export const QueryGetLastValidatorPowerRequest = { + encode(message: QueryGetLastValidatorPowerRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.owner !== "") { + writer.uint32(10).string(message.owner); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetLastValidatorPowerRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetLastValidatorPowerRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.owner = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetLastValidatorPowerRequest { + return { owner: isSet(object.owner) ? String(object.owner) : "" }; + }, + + toJSON(message: QueryGetLastValidatorPowerRequest): unknown { + const obj: any = {}; + message.owner !== undefined && (obj.owner = message.owner); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetLastValidatorPowerRequest { + const message = createBaseQueryGetLastValidatorPowerRequest(); + message.owner = object.owner ?? 
""; + return message; + }, +}; + +function createBaseQueryGetLastValidatorPowerResponse(): QueryGetLastValidatorPowerResponse { + return { lastValidatorPower: undefined }; +} + +export const QueryGetLastValidatorPowerResponse = { + encode(message: QueryGetLastValidatorPowerResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.lastValidatorPower !== undefined) { + LastValidatorPower.encode(message.lastValidatorPower, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetLastValidatorPowerResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetLastValidatorPowerResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.lastValidatorPower = LastValidatorPower.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetLastValidatorPowerResponse { + return { + lastValidatorPower: isSet(object.lastValidatorPower) + ? LastValidatorPower.fromJSON(object.lastValidatorPower) + : undefined, + }; + }, + + toJSON(message: QueryGetLastValidatorPowerResponse): unknown { + const obj: any = {}; + message.lastValidatorPower !== undefined && (obj.lastValidatorPower = message.lastValidatorPower + ? LastValidatorPower.toJSON(message.lastValidatorPower) + : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetLastValidatorPowerResponse { + const message = createBaseQueryGetLastValidatorPowerResponse(); + message.lastValidatorPower = (object.lastValidatorPower !== undefined && object.lastValidatorPower !== null) + ? 
LastValidatorPower.fromPartial(object.lastValidatorPower) + : undefined; + return message; + }, +}; + +function createBaseQueryAllLastValidatorPowerRequest(): QueryAllLastValidatorPowerRequest { + return { pagination: undefined }; +} + +export const QueryAllLastValidatorPowerRequest = { + encode(message: QueryAllLastValidatorPowerRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllLastValidatorPowerRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllLastValidatorPowerRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllLastValidatorPowerRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllLastValidatorPowerRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllLastValidatorPowerRequest { + const message = createBaseQueryAllLastValidatorPowerRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? 
PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllLastValidatorPowerResponse(): QueryAllLastValidatorPowerResponse { + return { lastValidatorPower: [], pagination: undefined }; +} + +export const QueryAllLastValidatorPowerResponse = { + encode(message: QueryAllLastValidatorPowerResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.lastValidatorPower) { + LastValidatorPower.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllLastValidatorPowerResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllLastValidatorPowerResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.lastValidatorPower.push(LastValidatorPower.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllLastValidatorPowerResponse { + return { + lastValidatorPower: Array.isArray(object?.lastValidatorPower) + ? object.lastValidatorPower.map((e: any) => LastValidatorPower.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllLastValidatorPowerResponse): unknown { + const obj: any = {}; + if (message.lastValidatorPower) { + obj.lastValidatorPower = message.lastValidatorPower.map((e) => e ? 
LastValidatorPower.toJSON(e) : undefined); + } else { + obj.lastValidatorPower = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllLastValidatorPowerResponse { + const message = createBaseQueryAllLastValidatorPowerResponse(); + message.lastValidatorPower = object.lastValidatorPower?.map((e) => LastValidatorPower.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetProposedDisableValidatorRequest(): QueryGetProposedDisableValidatorRequest { + return { address: "" }; +} + +export const QueryGetProposedDisableValidatorRequest = { + encode(message: QueryGetProposedDisableValidatorRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetProposedDisableValidatorRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetProposedDisableValidatorRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetProposedDisableValidatorRequest { + return { address: isSet(object.address) ? 
String(object.address) : "" }; + }, + + toJSON(message: QueryGetProposedDisableValidatorRequest): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetProposedDisableValidatorRequest { + const message = createBaseQueryGetProposedDisableValidatorRequest(); + message.address = object.address ?? ""; + return message; + }, +}; + +function createBaseQueryGetProposedDisableValidatorResponse(): QueryGetProposedDisableValidatorResponse { + return { proposedDisableValidator: undefined }; +} + +export const QueryGetProposedDisableValidatorResponse = { + encode(message: QueryGetProposedDisableValidatorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.proposedDisableValidator !== undefined) { + ProposedDisableValidator.encode(message.proposedDisableValidator, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetProposedDisableValidatorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetProposedDisableValidatorResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.proposedDisableValidator = ProposedDisableValidator.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetProposedDisableValidatorResponse { + return { + proposedDisableValidator: isSet(object.proposedDisableValidator) + ? 
ProposedDisableValidator.fromJSON(object.proposedDisableValidator) + : undefined, + }; + }, + + toJSON(message: QueryGetProposedDisableValidatorResponse): unknown { + const obj: any = {}; + message.proposedDisableValidator !== undefined && (obj.proposedDisableValidator = message.proposedDisableValidator + ? ProposedDisableValidator.toJSON(message.proposedDisableValidator) + : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetProposedDisableValidatorResponse { + const message = createBaseQueryGetProposedDisableValidatorResponse(); + message.proposedDisableValidator = + (object.proposedDisableValidator !== undefined && object.proposedDisableValidator !== null) + ? ProposedDisableValidator.fromPartial(object.proposedDisableValidator) + : undefined; + return message; + }, +}; + +function createBaseQueryAllProposedDisableValidatorRequest(): QueryAllProposedDisableValidatorRequest { + return { pagination: undefined }; +} + +export const QueryAllProposedDisableValidatorRequest = { + encode(message: QueryAllProposedDisableValidatorRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllProposedDisableValidatorRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllProposedDisableValidatorRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllProposedDisableValidatorRequest { + return { pagination: isSet(object.pagination) ? 
PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllProposedDisableValidatorRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllProposedDisableValidatorRequest { + const message = createBaseQueryAllProposedDisableValidatorRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllProposedDisableValidatorResponse(): QueryAllProposedDisableValidatorResponse { + return { proposedDisableValidator: [], pagination: undefined }; +} + +export const QueryAllProposedDisableValidatorResponse = { + encode(message: QueryAllProposedDisableValidatorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.proposedDisableValidator) { + ProposedDisableValidator.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllProposedDisableValidatorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryAllProposedDisableValidatorResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.proposedDisableValidator.push(ProposedDisableValidator.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllProposedDisableValidatorResponse { + return { + proposedDisableValidator: Array.isArray(object?.proposedDisableValidator) + ? object.proposedDisableValidator.map((e: any) => ProposedDisableValidator.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllProposedDisableValidatorResponse): unknown { + const obj: any = {}; + if (message.proposedDisableValidator) { + obj.proposedDisableValidator = message.proposedDisableValidator.map((e) => + e ? ProposedDisableValidator.toJSON(e) : undefined + ); + } else { + obj.proposedDisableValidator = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllProposedDisableValidatorResponse { + const message = createBaseQueryAllProposedDisableValidatorResponse(); + message.proposedDisableValidator = + object.proposedDisableValidator?.map((e) => ProposedDisableValidator.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? 
PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetDisabledValidatorRequest(): QueryGetDisabledValidatorRequest { + return { address: "" }; +} + +export const QueryGetDisabledValidatorRequest = { + encode(message: QueryGetDisabledValidatorRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetDisabledValidatorRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetDisabledValidatorRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetDisabledValidatorRequest { + return { address: isSet(object.address) ? String(object.address) : "" }; + }, + + toJSON(message: QueryGetDisabledValidatorRequest): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetDisabledValidatorRequest { + const message = createBaseQueryGetDisabledValidatorRequest(); + message.address = object.address ?? 
""; + return message; + }, +}; + +function createBaseQueryGetDisabledValidatorResponse(): QueryGetDisabledValidatorResponse { + return { disabledValidator: undefined }; +} + +export const QueryGetDisabledValidatorResponse = { + encode(message: QueryGetDisabledValidatorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.disabledValidator !== undefined) { + DisabledValidator.encode(message.disabledValidator, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetDisabledValidatorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetDisabledValidatorResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.disabledValidator = DisabledValidator.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetDisabledValidatorResponse { + return { + disabledValidator: isSet(object.disabledValidator) + ? DisabledValidator.fromJSON(object.disabledValidator) + : undefined, + }; + }, + + toJSON(message: QueryGetDisabledValidatorResponse): unknown { + const obj: any = {}; + message.disabledValidator !== undefined && (obj.disabledValidator = message.disabledValidator + ? DisabledValidator.toJSON(message.disabledValidator) + : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetDisabledValidatorResponse { + const message = createBaseQueryGetDisabledValidatorResponse(); + message.disabledValidator = (object.disabledValidator !== undefined && object.disabledValidator !== null) + ? 
DisabledValidator.fromPartial(object.disabledValidator) + : undefined; + return message; + }, +}; + +function createBaseQueryAllDisabledValidatorRequest(): QueryAllDisabledValidatorRequest { + return { pagination: undefined }; +} + +export const QueryAllDisabledValidatorRequest = { + encode(message: QueryAllDisabledValidatorRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllDisabledValidatorRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllDisabledValidatorRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllDisabledValidatorRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllDisabledValidatorRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllDisabledValidatorRequest { + const message = createBaseQueryAllDisabledValidatorRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? 
PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllDisabledValidatorResponse(): QueryAllDisabledValidatorResponse { + return { disabledValidator: [], pagination: undefined }; +} + +export const QueryAllDisabledValidatorResponse = { + encode(message: QueryAllDisabledValidatorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.disabledValidator) { + DisabledValidator.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllDisabledValidatorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllDisabledValidatorResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.disabledValidator.push(DisabledValidator.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllDisabledValidatorResponse { + return { + disabledValidator: Array.isArray(object?.disabledValidator) + ? object.disabledValidator.map((e: any) => DisabledValidator.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllDisabledValidatorResponse): unknown { + const obj: any = {}; + if (message.disabledValidator) { + obj.disabledValidator = message.disabledValidator.map((e) => e ? 
DisabledValidator.toJSON(e) : undefined); + } else { + obj.disabledValidator = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllDisabledValidatorResponse { + const message = createBaseQueryAllDisabledValidatorResponse(); + message.disabledValidator = object.disabledValidator?.map((e) => DisabledValidator.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryGetRejectedDisableValidatorRequest(): QueryGetRejectedDisableValidatorRequest { + return { owner: "" }; +} + +export const QueryGetRejectedDisableValidatorRequest = { + encode(message: QueryGetRejectedDisableValidatorRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.owner !== "") { + writer.uint32(10).string(message.owner); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetRejectedDisableValidatorRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetRejectedDisableValidatorRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.owner = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetRejectedDisableValidatorRequest { + return { owner: isSet(object.owner) ? 
String(object.owner) : "" }; + }, + + toJSON(message: QueryGetRejectedDisableValidatorRequest): unknown { + const obj: any = {}; + message.owner !== undefined && (obj.owner = message.owner); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetRejectedDisableValidatorRequest { + const message = createBaseQueryGetRejectedDisableValidatorRequest(); + message.owner = object.owner ?? ""; + return message; + }, +}; + +function createBaseQueryGetRejectedDisableValidatorResponse(): QueryGetRejectedDisableValidatorResponse { + return { rejectedValidator: undefined }; +} + +export const QueryGetRejectedDisableValidatorResponse = { + encode(message: QueryGetRejectedDisableValidatorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.rejectedValidator !== undefined) { + RejectedDisableValidator.encode(message.rejectedValidator, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetRejectedDisableValidatorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetRejectedDisableValidatorResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rejectedValidator = RejectedDisableValidator.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetRejectedDisableValidatorResponse { + return { + rejectedValidator: isSet(object.rejectedValidator) + ? RejectedDisableValidator.fromJSON(object.rejectedValidator) + : undefined, + }; + }, + + toJSON(message: QueryGetRejectedDisableValidatorResponse): unknown { + const obj: any = {}; + message.rejectedValidator !== undefined && (obj.rejectedValidator = message.rejectedValidator + ? 
RejectedDisableValidator.toJSON(message.rejectedValidator) + : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryGetRejectedDisableValidatorResponse { + const message = createBaseQueryGetRejectedDisableValidatorResponse(); + message.rejectedValidator = (object.rejectedValidator !== undefined && object.rejectedValidator !== null) + ? RejectedDisableValidator.fromPartial(object.rejectedValidator) + : undefined; + return message; + }, +}; + +function createBaseQueryAllRejectedDisableValidatorRequest(): QueryAllRejectedDisableValidatorRequest { + return { pagination: undefined }; +} + +export const QueryAllRejectedDisableValidatorRequest = { + encode(message: QueryAllRejectedDisableValidatorRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllRejectedDisableValidatorRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllRejectedDisableValidatorRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllRejectedDisableValidatorRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllRejectedDisableValidatorRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? 
PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllRejectedDisableValidatorRequest { + const message = createBaseQueryAllRejectedDisableValidatorRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllRejectedDisableValidatorResponse(): QueryAllRejectedDisableValidatorResponse { + return { rejectedValidator: [], pagination: undefined }; +} + +export const QueryAllRejectedDisableValidatorResponse = { + encode(message: QueryAllRejectedDisableValidatorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rejectedValidator) { + RejectedDisableValidator.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllRejectedDisableValidatorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllRejectedDisableValidatorResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rejectedValidator.push(RejectedDisableValidator.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllRejectedDisableValidatorResponse { + return { + rejectedValidator: Array.isArray(object?.rejectedValidator) + ? object.rejectedValidator.map((e: any) => RejectedDisableValidator.fromJSON(e)) + : [], + pagination: isSet(object.pagination) ? 
PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllRejectedDisableValidatorResponse): unknown { + const obj: any = {}; + if (message.rejectedValidator) { + obj.rejectedValidator = message.rejectedValidator.map((e) => e ? RejectedDisableValidator.toJSON(e) : undefined); + } else { + obj.rejectedValidator = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): QueryAllRejectedDisableValidatorResponse { + const message = createBaseQueryAllRejectedDisableValidatorResponse(); + message.rejectedValidator = object.rejectedValidator?.map((e) => RejectedDisableValidator.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +/** Query defines the gRPC querier service. */ +export interface Query { + /** Queries a validator by index. */ + Validator(request: QueryGetValidatorRequest): Promise; + /** Queries a list of validator items. */ + ValidatorAll(request: QueryAllValidatorRequest): Promise; + /** Queries a lastValidatorPower by index. */ + LastValidatorPower(request: QueryGetLastValidatorPowerRequest): Promise; + /** Queries a list of lastValidatorPower items. */ + LastValidatorPowerAll(request: QueryAllLastValidatorPowerRequest): Promise; + /** Queries a ProposedDisableValidator by index. */ + ProposedDisableValidator( + request: QueryGetProposedDisableValidatorRequest, + ): Promise; + /** Queries a list of ProposedDisableValidator items. */ + ProposedDisableValidatorAll( + request: QueryAllProposedDisableValidatorRequest, + ): Promise; + /** Queries a DisabledValidator by index. */ + DisabledValidator(request: QueryGetDisabledValidatorRequest): Promise; + /** Queries a list of DisabledValidator items. 
*/ + DisabledValidatorAll(request: QueryAllDisabledValidatorRequest): Promise; + /** Queries a RejectedNode by index. */ + RejectedDisableValidator( + request: QueryGetRejectedDisableValidatorRequest, + ): Promise; + /** Queries a list of RejectedNode items. */ + RejectedDisableValidatorAll( + request: QueryAllRejectedDisableValidatorRequest, + ): Promise; +} + +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.Validator = this.Validator.bind(this); + this.ValidatorAll = this.ValidatorAll.bind(this); + this.LastValidatorPower = this.LastValidatorPower.bind(this); + this.LastValidatorPowerAll = this.LastValidatorPowerAll.bind(this); + this.ProposedDisableValidator = this.ProposedDisableValidator.bind(this); + this.ProposedDisableValidatorAll = this.ProposedDisableValidatorAll.bind(this); + this.DisabledValidator = this.DisabledValidator.bind(this); + this.DisabledValidatorAll = this.DisabledValidatorAll.bind(this); + this.RejectedDisableValidator = this.RejectedDisableValidator.bind(this); + this.RejectedDisableValidatorAll = this.RejectedDisableValidatorAll.bind(this); + } + Validator(request: QueryGetValidatorRequest): Promise { + const data = QueryGetValidatorRequest.encode(request).finish(); + const promise = this.rpc.request("zigbeealliance.distributedcomplianceledger.validator.Query", "Validator", data); + return promise.then((data) => QueryGetValidatorResponse.decode(new _m0.Reader(data))); + } + + ValidatorAll(request: QueryAllValidatorRequest): Promise { + const data = QueryAllValidatorRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.validator.Query", + "ValidatorAll", + data, + ); + return promise.then((data) => QueryAllValidatorResponse.decode(new _m0.Reader(data))); + } + + LastValidatorPower(request: QueryGetLastValidatorPowerRequest): Promise { + const data = 
QueryGetLastValidatorPowerRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.validator.Query", + "LastValidatorPower", + data, + ); + return promise.then((data) => QueryGetLastValidatorPowerResponse.decode(new _m0.Reader(data))); + } + + LastValidatorPowerAll(request: QueryAllLastValidatorPowerRequest): Promise { + const data = QueryAllLastValidatorPowerRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.validator.Query", + "LastValidatorPowerAll", + data, + ); + return promise.then((data) => QueryAllLastValidatorPowerResponse.decode(new _m0.Reader(data))); + } + + ProposedDisableValidator( + request: QueryGetProposedDisableValidatorRequest, + ): Promise { + const data = QueryGetProposedDisableValidatorRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.validator.Query", + "ProposedDisableValidator", + data, + ); + return promise.then((data) => QueryGetProposedDisableValidatorResponse.decode(new _m0.Reader(data))); + } + + ProposedDisableValidatorAll( + request: QueryAllProposedDisableValidatorRequest, + ): Promise { + const data = QueryAllProposedDisableValidatorRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.validator.Query", + "ProposedDisableValidatorAll", + data, + ); + return promise.then((data) => QueryAllProposedDisableValidatorResponse.decode(new _m0.Reader(data))); + } + + DisabledValidator(request: QueryGetDisabledValidatorRequest): Promise { + const data = QueryGetDisabledValidatorRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.validator.Query", + "DisabledValidator", + data, + ); + return promise.then((data) => QueryGetDisabledValidatorResponse.decode(new _m0.Reader(data))); + } + + DisabledValidatorAll(request: 
QueryAllDisabledValidatorRequest): Promise { + const data = QueryAllDisabledValidatorRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.validator.Query", + "DisabledValidatorAll", + data, + ); + return promise.then((data) => QueryAllDisabledValidatorResponse.decode(new _m0.Reader(data))); + } + + RejectedDisableValidator( + request: QueryGetRejectedDisableValidatorRequest, + ): Promise { + const data = QueryGetRejectedDisableValidatorRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.validator.Query", + "RejectedDisableValidator", + data, + ); + return promise.then((data) => QueryGetRejectedDisableValidatorResponse.decode(new _m0.Reader(data))); + } + + RejectedDisableValidatorAll( + request: QueryAllRejectedDisableValidatorRequest, + ): Promise { + const data = QueryAllRejectedDisableValidatorRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.validator.Query", + "RejectedDisableValidatorAll", + data, + ); + return promise.then((data) => QueryAllRejectedDisableValidatorResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/rejected_validator.ts b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/rejected_validator.ts new file mode 100644 index 000000000..c1f8a7879 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/rejected_validator.ts @@ -0,0 +1,111 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Grant } from "./grant"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.validator"; + +export interface RejectedDisableValidator { + address: string; + creator: string; + approvals: Grant[]; + rejects: Grant[]; +} + +function createBaseRejectedDisableValidator(): RejectedDisableValidator { + return { address: "", creator: "", approvals: [], rejects: [] }; +} + +export const RejectedDisableValidator = { + encode(message: RejectedDisableValidator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.address !== "") { + writer.uint32(10).string(message.address); + } + if (message.creator !== "") { + writer.uint32(18).string(message.creator); + } + for (const v of message.approvals) { + Grant.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.rejects) { + Grant.encode(v!, writer.uint32(34).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): RejectedDisableValidator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseRejectedDisableValidator(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + case 2: + message.creator = reader.string(); + break; + case 3: + message.approvals.push(Grant.decode(reader, reader.uint32())); + break; + case 4: + message.rejects.push(Grant.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): RejectedDisableValidator { + return { + address: isSet(object.address) ? String(object.address) : "", + creator: isSet(object.creator) ? String(object.creator) : "", + approvals: Array.isArray(object?.approvals) ? object.approvals.map((e: any) => Grant.fromJSON(e)) : [], + rejects: Array.isArray(object?.rejects) ? object.rejects.map((e: any) => Grant.fromJSON(e)) : [], + }; + }, + + toJSON(message: RejectedDisableValidator): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + message.creator !== undefined && (obj.creator = message.creator); + if (message.approvals) { + obj.approvals = message.approvals.map((e) => e ? Grant.toJSON(e) : undefined); + } else { + obj.approvals = []; + } + if (message.rejects) { + obj.rejects = message.rejects.map((e) => e ? Grant.toJSON(e) : undefined); + } else { + obj.rejects = []; + } + return obj; + }, + + fromPartial, I>>(object: I): RejectedDisableValidator { + const message = createBaseRejectedDisableValidator(); + message.address = object.address ?? ""; + message.creator = object.creator ?? ""; + message.approvals = object.approvals?.map((e) => Grant.fromPartial(e)) || []; + message.rejects = object.rejects?.map((e) => Grant.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? 
Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/tx.ts b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/tx.ts new file mode 100644 index 000000000..ca2890622 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/tx.ts @@ -0,0 +1,827 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; +import { Any } from "../../../google/protobuf/any"; +import { Description } from "./description"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.validator"; + +export interface MsgCreateValidator { + signer: string; + pubKey: Any | undefined; + description: Description | undefined; +} + +export interface MsgCreateValidatorResponse { +} + +export interface MsgProposeDisableValidator { + creator: string; + address: string; + info: string; + time: number; +} + +export interface MsgProposeDisableValidatorResponse { +} + +export interface MsgApproveDisableValidator { + creator: string; + address: string; + info: string; + time: number; +} + +export interface MsgApproveDisableValidatorResponse { +} + +export interface MsgDisableValidator { + creator: string; +} + +export interface MsgDisableValidatorResponse { +} + +export interface MsgEnableValidator { + creator: string; +} + +export interface MsgEnableValidatorResponse { +} + +export interface MsgRejectDisableValidator { + creator: string; + address: string; + info: 
string; + time: number; +} + +export interface MsgRejectDisableValidatorResponse { +} + +function createBaseMsgCreateValidator(): MsgCreateValidator { + return { signer: "", pubKey: undefined, description: undefined }; +} + +export const MsgCreateValidator = { + encode(message: MsgCreateValidator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.signer !== "") { + writer.uint32(10).string(message.signer); + } + if (message.pubKey !== undefined) { + Any.encode(message.pubKey, writer.uint32(18).fork()).ldelim(); + } + if (message.description !== undefined) { + Description.encode(message.description, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateValidator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCreateValidator(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.signer = reader.string(); + break; + case 2: + message.pubKey = Any.decode(reader, reader.uint32()); + break; + case 3: + message.description = Description.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgCreateValidator { + return { + signer: isSet(object.signer) ? String(object.signer) : "", + pubKey: isSet(object.pubKey) ? Any.fromJSON(object.pubKey) : undefined, + description: isSet(object.description) ? Description.fromJSON(object.description) : undefined, + }; + }, + + toJSON(message: MsgCreateValidator): unknown { + const obj: any = {}; + message.signer !== undefined && (obj.signer = message.signer); + message.pubKey !== undefined && (obj.pubKey = message.pubKey ? Any.toJSON(message.pubKey) : undefined); + message.description !== undefined + && (obj.description = message.description ? 
Description.toJSON(message.description) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): MsgCreateValidator { + const message = createBaseMsgCreateValidator(); + message.signer = object.signer ?? ""; + message.pubKey = (object.pubKey !== undefined && object.pubKey !== null) + ? Any.fromPartial(object.pubKey) + : undefined; + message.description = (object.description !== undefined && object.description !== null) + ? Description.fromPartial(object.description) + : undefined; + return message; + }, +}; + +function createBaseMsgCreateValidatorResponse(): MsgCreateValidatorResponse { + return {}; +} + +export const MsgCreateValidatorResponse = { + encode(_: MsgCreateValidatorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateValidatorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCreateValidatorResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgCreateValidatorResponse { + return {}; + }, + + toJSON(_: MsgCreateValidatorResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgCreateValidatorResponse { + const message = createBaseMsgCreateValidatorResponse(); + return message; + }, +}; + +function createBaseMsgProposeDisableValidator(): MsgProposeDisableValidator { + return { creator: "", address: "", info: "", time: 0 }; +} + +export const MsgProposeDisableValidator = { + encode(message: MsgProposeDisableValidator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.creator !== "") { + writer.uint32(10).string(message.creator); + } + if (message.address !== "") { + writer.uint32(18).string(message.address); + } 
+ if (message.info !== "") { + writer.uint32(26).string(message.info); + } + if (message.time !== 0) { + writer.uint32(32).int64(message.time); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgProposeDisableValidator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgProposeDisableValidator(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.creator = reader.string(); + break; + case 2: + message.address = reader.string(); + break; + case 3: + message.info = reader.string(); + break; + case 4: + message.time = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgProposeDisableValidator { + return { + creator: isSet(object.creator) ? String(object.creator) : "", + address: isSet(object.address) ? String(object.address) : "", + info: isSet(object.info) ? String(object.info) : "", + time: isSet(object.time) ? Number(object.time) : 0, + }; + }, + + toJSON(message: MsgProposeDisableValidator): unknown { + const obj: any = {}; + message.creator !== undefined && (obj.creator = message.creator); + message.address !== undefined && (obj.address = message.address); + message.info !== undefined && (obj.info = message.info); + message.time !== undefined && (obj.time = Math.round(message.time)); + return obj; + }, + + fromPartial, I>>(object: I): MsgProposeDisableValidator { + const message = createBaseMsgProposeDisableValidator(); + message.creator = object.creator ?? ""; + message.address = object.address ?? ""; + message.info = object.info ?? ""; + message.time = object.time ?? 
0; + return message; + }, +}; + +function createBaseMsgProposeDisableValidatorResponse(): MsgProposeDisableValidatorResponse { + return {}; +} + +export const MsgProposeDisableValidatorResponse = { + encode(_: MsgProposeDisableValidatorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgProposeDisableValidatorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgProposeDisableValidatorResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgProposeDisableValidatorResponse { + return {}; + }, + + toJSON(_: MsgProposeDisableValidatorResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>( + _: I, + ): MsgProposeDisableValidatorResponse { + const message = createBaseMsgProposeDisableValidatorResponse(); + return message; + }, +}; + +function createBaseMsgApproveDisableValidator(): MsgApproveDisableValidator { + return { creator: "", address: "", info: "", time: 0 }; +} + +export const MsgApproveDisableValidator = { + encode(message: MsgApproveDisableValidator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.creator !== "") { + writer.uint32(10).string(message.creator); + } + if (message.address !== "") { + writer.uint32(18).string(message.address); + } + if (message.info !== "") { + writer.uint32(26).string(message.info); + } + if (message.time !== 0) { + writer.uint32(32).int64(message.time); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgApproveDisableValidator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgApproveDisableValidator(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.creator = reader.string(); + break; + case 2: + message.address = reader.string(); + break; + case 3: + message.info = reader.string(); + break; + case 4: + message.time = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgApproveDisableValidator { + return { + creator: isSet(object.creator) ? String(object.creator) : "", + address: isSet(object.address) ? String(object.address) : "", + info: isSet(object.info) ? String(object.info) : "", + time: isSet(object.time) ? Number(object.time) : 0, + }; + }, + + toJSON(message: MsgApproveDisableValidator): unknown { + const obj: any = {}; + message.creator !== undefined && (obj.creator = message.creator); + message.address !== undefined && (obj.address = message.address); + message.info !== undefined && (obj.info = message.info); + message.time !== undefined && (obj.time = Math.round(message.time)); + return obj; + }, + + fromPartial, I>>(object: I): MsgApproveDisableValidator { + const message = createBaseMsgApproveDisableValidator(); + message.creator = object.creator ?? ""; + message.address = object.address ?? ""; + message.info = object.info ?? ""; + message.time = object.time ?? 0; + return message; + }, +}; + +function createBaseMsgApproveDisableValidatorResponse(): MsgApproveDisableValidatorResponse { + return {}; +} + +export const MsgApproveDisableValidatorResponse = { + encode(_: MsgApproveDisableValidatorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgApproveDisableValidatorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgApproveDisableValidatorResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgApproveDisableValidatorResponse { + return {}; + }, + + toJSON(_: MsgApproveDisableValidatorResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>( + _: I, + ): MsgApproveDisableValidatorResponse { + const message = createBaseMsgApproveDisableValidatorResponse(); + return message; + }, +}; + +function createBaseMsgDisableValidator(): MsgDisableValidator { + return { creator: "" }; +} + +export const MsgDisableValidator = { + encode(message: MsgDisableValidator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.creator !== "") { + writer.uint32(10).string(message.creator); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDisableValidator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgDisableValidator(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.creator = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgDisableValidator { + return { creator: isSet(object.creator) ? String(object.creator) : "" }; + }, + + toJSON(message: MsgDisableValidator): unknown { + const obj: any = {}; + message.creator !== undefined && (obj.creator = message.creator); + return obj; + }, + + fromPartial, I>>(object: I): MsgDisableValidator { + const message = createBaseMsgDisableValidator(); + message.creator = object.creator ?? 
""; + return message; + }, +}; + +function createBaseMsgDisableValidatorResponse(): MsgDisableValidatorResponse { + return {}; +} + +export const MsgDisableValidatorResponse = { + encode(_: MsgDisableValidatorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgDisableValidatorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgDisableValidatorResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgDisableValidatorResponse { + return {}; + }, + + toJSON(_: MsgDisableValidatorResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgDisableValidatorResponse { + const message = createBaseMsgDisableValidatorResponse(); + return message; + }, +}; + +function createBaseMsgEnableValidator(): MsgEnableValidator { + return { creator: "" }; +} + +export const MsgEnableValidator = { + encode(message: MsgEnableValidator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.creator !== "") { + writer.uint32(10).string(message.creator); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgEnableValidator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgEnableValidator(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.creator = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgEnableValidator { + return { creator: isSet(object.creator) ? 
String(object.creator) : "" }; + }, + + toJSON(message: MsgEnableValidator): unknown { + const obj: any = {}; + message.creator !== undefined && (obj.creator = message.creator); + return obj; + }, + + fromPartial, I>>(object: I): MsgEnableValidator { + const message = createBaseMsgEnableValidator(); + message.creator = object.creator ?? ""; + return message; + }, +}; + +function createBaseMsgEnableValidatorResponse(): MsgEnableValidatorResponse { + return {}; +} + +export const MsgEnableValidatorResponse = { + encode(_: MsgEnableValidatorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgEnableValidatorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgEnableValidatorResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgEnableValidatorResponse { + return {}; + }, + + toJSON(_: MsgEnableValidatorResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgEnableValidatorResponse { + const message = createBaseMsgEnableValidatorResponse(); + return message; + }, +}; + +function createBaseMsgRejectDisableValidator(): MsgRejectDisableValidator { + return { creator: "", address: "", info: "", time: 0 }; +} + +export const MsgRejectDisableValidator = { + encode(message: MsgRejectDisableValidator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.creator !== "") { + writer.uint32(10).string(message.creator); + } + if (message.address !== "") { + writer.uint32(18).string(message.address); + } + if (message.info !== "") { + writer.uint32(26).string(message.info); + } + if (message.time !== 0) { + writer.uint32(32).int64(message.time); + } + 
return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRejectDisableValidator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgRejectDisableValidator(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.creator = reader.string(); + break; + case 2: + message.address = reader.string(); + break; + case 3: + message.info = reader.string(); + break; + case 4: + message.time = longToNumber(reader.int64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgRejectDisableValidator { + return { + creator: isSet(object.creator) ? String(object.creator) : "", + address: isSet(object.address) ? String(object.address) : "", + info: isSet(object.info) ? String(object.info) : "", + time: isSet(object.time) ? Number(object.time) : 0, + }; + }, + + toJSON(message: MsgRejectDisableValidator): unknown { + const obj: any = {}; + message.creator !== undefined && (obj.creator = message.creator); + message.address !== undefined && (obj.address = message.address); + message.info !== undefined && (obj.info = message.info); + message.time !== undefined && (obj.time = Math.round(message.time)); + return obj; + }, + + fromPartial, I>>(object: I): MsgRejectDisableValidator { + const message = createBaseMsgRejectDisableValidator(); + message.creator = object.creator ?? ""; + message.address = object.address ?? ""; + message.info = object.info ?? ""; + message.time = object.time ?? 
0; + return message; + }, +}; + +function createBaseMsgRejectDisableValidatorResponse(): MsgRejectDisableValidatorResponse { + return {}; +} + +export const MsgRejectDisableValidatorResponse = { + encode(_: MsgRejectDisableValidatorResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgRejectDisableValidatorResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgRejectDisableValidatorResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgRejectDisableValidatorResponse { + return {}; + }, + + toJSON(_: MsgRejectDisableValidatorResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>( + _: I, + ): MsgRejectDisableValidatorResponse { + const message = createBaseMsgRejectDisableValidatorResponse(); + return message; + }, +}; + +/** Msg defines the Msg service. 
*/ +export interface Msg { + CreateValidator(request: MsgCreateValidator): Promise; + ProposeDisableValidator(request: MsgProposeDisableValidator): Promise; + ApproveDisableValidator(request: MsgApproveDisableValidator): Promise; + DisableValidator(request: MsgDisableValidator): Promise; + EnableValidator(request: MsgEnableValidator): Promise; + /** this line is used by starport scaffolding # proto/tx/rpc */ + RejectDisableValidator(request: MsgRejectDisableValidator): Promise; +} + +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.CreateValidator = this.CreateValidator.bind(this); + this.ProposeDisableValidator = this.ProposeDisableValidator.bind(this); + this.ApproveDisableValidator = this.ApproveDisableValidator.bind(this); + this.DisableValidator = this.DisableValidator.bind(this); + this.EnableValidator = this.EnableValidator.bind(this); + this.RejectDisableValidator = this.RejectDisableValidator.bind(this); + } + CreateValidator(request: MsgCreateValidator): Promise { + const data = MsgCreateValidator.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.validator.Msg", + "CreateValidator", + data, + ); + return promise.then((data) => MsgCreateValidatorResponse.decode(new _m0.Reader(data))); + } + + ProposeDisableValidator(request: MsgProposeDisableValidator): Promise { + const data = MsgProposeDisableValidator.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.validator.Msg", + "ProposeDisableValidator", + data, + ); + return promise.then((data) => MsgProposeDisableValidatorResponse.decode(new _m0.Reader(data))); + } + + ApproveDisableValidator(request: MsgApproveDisableValidator): Promise { + const data = MsgApproveDisableValidator.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.validator.Msg", + 
"ApproveDisableValidator", + data, + ); + return promise.then((data) => MsgApproveDisableValidatorResponse.decode(new _m0.Reader(data))); + } + + DisableValidator(request: MsgDisableValidator): Promise { + const data = MsgDisableValidator.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.validator.Msg", + "DisableValidator", + data, + ); + return promise.then((data) => MsgDisableValidatorResponse.decode(new _m0.Reader(data))); + } + + EnableValidator(request: MsgEnableValidator): Promise { + const data = MsgEnableValidator.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.validator.Msg", + "EnableValidator", + data, + ); + return promise.then((data) => MsgEnableValidatorResponse.decode(new _m0.Reader(data))); + } + + RejectDisableValidator(request: MsgRejectDisableValidator): Promise { + const data = MsgRejectDisableValidator.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.validator.Msg", + "RejectDisableValidator", + data, + ); + return promise.then((data) => MsgRejectDisableValidatorResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? 
{ [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/validator.ts b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/validator.ts new file mode 100644 index 000000000..e2161c162 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.validator/types/zigbeealliance/distributedcomplianceledger/validator/validator.ts @@ -0,0 +1,139 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { Any } from "../../../google/protobuf/any"; +import { Description } from "./description"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.validator"; + +export interface Validator { + /** the account address of validator owner */ + owner: string; + /** description of the validator */ + description: + | Description + | undefined; + /** the consensus public key of the tendermint validator */ + pubKey: + | Any + | undefined; + /** validator consensus power */ + power: number; + /** has the validator been removed from validator set */ + jailed: boolean; + /** the reason of validator jailing */ + jailedReason: string; +} + +function createBaseValidator(): Validator { + return { owner: "", description: undefined, pubKey: undefined, power: 0, jailed: false, jailedReason: "" }; +} + +export const Validator 
= { + encode(message: Validator, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.owner !== "") { + writer.uint32(10).string(message.owner); + } + if (message.description !== undefined) { + Description.encode(message.description, writer.uint32(18).fork()).ldelim(); + } + if (message.pubKey !== undefined) { + Any.encode(message.pubKey, writer.uint32(26).fork()).ldelim(); + } + if (message.power !== 0) { + writer.uint32(32).int32(message.power); + } + if (message.jailed === true) { + writer.uint32(40).bool(message.jailed); + } + if (message.jailedReason !== "") { + writer.uint32(50).string(message.jailedReason); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Validator { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseValidator(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.owner = reader.string(); + break; + case 2: + message.description = Description.decode(reader, reader.uint32()); + break; + case 3: + message.pubKey = Any.decode(reader, reader.uint32()); + break; + case 4: + message.power = reader.int32(); + break; + case 5: + message.jailed = reader.bool(); + break; + case 6: + message.jailedReason = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Validator { + return { + owner: isSet(object.owner) ? String(object.owner) : "", + description: isSet(object.description) ? Description.fromJSON(object.description) : undefined, + pubKey: isSet(object.pubKey) ? Any.fromJSON(object.pubKey) : undefined, + power: isSet(object.power) ? Number(object.power) : 0, + jailed: isSet(object.jailed) ? Boolean(object.jailed) : false, + jailedReason: isSet(object.jailedReason) ? 
String(object.jailedReason) : "", + }; + }, + + toJSON(message: Validator): unknown { + const obj: any = {}; + message.owner !== undefined && (obj.owner = message.owner); + message.description !== undefined + && (obj.description = message.description ? Description.toJSON(message.description) : undefined); + message.pubKey !== undefined && (obj.pubKey = message.pubKey ? Any.toJSON(message.pubKey) : undefined); + message.power !== undefined && (obj.power = Math.round(message.power)); + message.jailed !== undefined && (obj.jailed = message.jailed); + message.jailedReason !== undefined && (obj.jailedReason = message.jailedReason); + return obj; + }, + + fromPartial, I>>(object: I): Validator { + const message = createBaseValidator(); + message.owner = object.owner ?? ""; + message.description = (object.description !== undefined && object.description !== null) + ? Description.fromPartial(object.description) + : undefined; + message.pubKey = (object.pubKey !== undefined && object.pubKey !== null) + ? Any.fromPartial(object.pubKey) + : undefined; + message.power = object.power ?? 0; + message.jailed = object.jailed ?? false; + message.jailedReason = object.jailedReason ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/api.swagger.yml b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/api.swagger.yml new file mode 100644 index 000000000..6f0747ad1 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/api.swagger.yml @@ -0,0 +1,269 @@ +swagger: '2.0' +info: + title: HTTP API Console zigbeealliance.distributedcomplianceledger.vendorinfo + name: '' + description: '' +paths: + /dcl/vendorinfo/vendors: + get: + operationId: VendorInfoAll + responses: + '200': + description: A successful response. + schema: + type: object + properties: + vendorInfo: + type: array + items: + type: object + properties: + vendorID: + type: integer + format: int32 + vendorName: + type: string + companyLegalName: + type: string + companyPreferredName: + type: string + vendorLandingPageURL: + type: string + creator: + type: string + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: pagination.key + in: query + required: false + type: string + format: byte + - name: pagination.offset + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + in: query + required: false + type: boolean + - name: pagination.reverse + in: query + required: false + type: boolean + tags: + - Query + /dcl/vendorinfo/vendors/{vendorID}: + get: + operationId: VendorInfo + responses: + '200': + description: A successful response. + schema: + type: object + properties: + vendorInfo: + type: object + properties: + vendorID: + type: integer + format: int32 + vendorName: + type: string + companyLegalName: + type: string + companyPreferredName: + type: string + vendorLandingPageURL: + type: string + creator: + type: string + schemaVersion: + type: integer + format: int64 + default: + description: An unexpected error response. 
+ schema: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + parameters: + - name: vendorID + in: path + required: true + type: integer + format: int32 + tags: + - Query +definitions: + Any: + type: object + properties: + '@type': + type: string + additionalProperties: {} + Status: + type: object + properties: + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + '@type': + type: string + additionalProperties: {} + PageRequest: + type: object + properties: + key: + type: string + format: byte + offset: + type: string + format: uint64 + limit: + type: string + format: uint64 + count_total: + type: boolean + reverse: + type: boolean + PageResponse: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryAllVendorInfoResponse: + type: object + properties: + vendorInfo: + type: array + items: + type: object + properties: + vendorID: + type: integer + format: int32 + vendorName: + type: string + companyLegalName: + type: string + companyPreferredName: + type: string + vendorLandingPageURL: + type: string + creator: + type: string + schemaVersion: + type: integer + format: int64 + pagination: + type: object + properties: + next_key: + type: string + format: byte + total: + type: string + format: uint64 + QueryGetVendorInfoResponse: + type: object + properties: + vendorInfo: + type: object + properties: + vendorID: + type: integer + format: int32 + vendorName: + type: string + companyLegalName: + type: string + companyPreferredName: + type: string + vendorLandingPageURL: + type: string + creator: + type: string + schemaVersion: + type: integer + format: int64 + vendorinfo.VendorInfo: + type: object + properties: + vendorID: + type: integer + format: int32 + vendorName: + 
type: string + companyLegalName: + type: string + companyPreferredName: + type: string + vendorLandingPageURL: + type: string + creator: + type: string + schemaVersion: + type: integer + format: int64 + MsgCreateVendorInfoResponse: + type: object + MsgUpdateVendorInfoResponse: + type: object diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/index.ts b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/index.ts new file mode 100755 index 000000000..c9dfa159e --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/index.ts @@ -0,0 +1,6 @@ +import Module from './module'; +import { txClient, queryClient, registry } from './module'; +import { msgTypes } from './registry'; + +export * from "./types"; +export { Module, msgTypes, txClient, queryClient, registry }; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/module.ts b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/module.ts new file mode 100755 index 000000000..f43f0c642 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/module.ts @@ -0,0 +1,164 @@ +// Generated by Ignite ignite.com/cli + +import { StdFee } from "@cosmjs/launchpad"; +import { SigningStargateClient, DeliverTxResponse } from "@cosmjs/stargate"; +import { EncodeObject, GeneratedType, OfflineSigner, Registry } from "@cosmjs/proto-signing"; +import { msgTypes } from './registry'; +import { IgniteClient } from "../client" +import { MissingWalletError } from "../helpers" +import { Api } from "./rest"; +import { MsgCreateVendorInfo } from "./types/zigbeealliance/distributedcomplianceledger/vendorinfo/tx"; +import { MsgUpdateVendorInfo } from "./types/zigbeealliance/distributedcomplianceledger/vendorinfo/tx"; + +import { VendorInfo as typeVendorInfo} from "./types" + +export { MsgCreateVendorInfo, MsgUpdateVendorInfo }; + +type sendMsgCreateVendorInfoParams = { + value: MsgCreateVendorInfo, + fee?: StdFee, + memo?: 
string +}; + +type sendMsgUpdateVendorInfoParams = { + value: MsgUpdateVendorInfo, + fee?: StdFee, + memo?: string +}; + + +type msgCreateVendorInfoParams = { + value: MsgCreateVendorInfo, +}; + +type msgUpdateVendorInfoParams = { + value: MsgUpdateVendorInfo, +}; + + +export const registry = new Registry(msgTypes); + +type Field = { + name: string; + type: unknown; +} +function getStructure(template) { + const structure: {fields: Field[]} = { fields: [] } + for (let [key, value] of Object.entries(template)) { + let field = { name: key, type: typeof value } + structure.fields.push(field) + } + return structure +} +const defaultFee = { + amount: [], + gas: "200000", +}; + +interface TxClientOptions { + addr: string + prefix: string + signer?: OfflineSigner +} + +export const txClient = ({ signer, prefix, addr }: TxClientOptions = { addr: "http://localhost:26657", prefix: "cosmos" }) => { + + return { + + async sendMsgCreateVendorInfo({ value, fee, memo }: sendMsgCreateVendorInfoParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgCreateVendorInfo: Unable to sign Tx. Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgCreateVendorInfo({ value: MsgCreateVendorInfo.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgCreateVendorInfo: Could not broadcast Tx: '+ e.message) + } + }, + + async sendMsgUpdateVendorInfo({ value, fee, memo }: sendMsgUpdateVendorInfoParams): Promise { + if (!signer) { + throw new Error('TxClient:sendMsgUpdateVendorInfo: Unable to sign Tx. 
Signer is not present.') + } + try { + const { address } = (await signer.getAccounts())[0]; + const signingClient = await SigningStargateClient.connectWithSigner(addr,signer,{registry, prefix}); + let msg = this.msgUpdateVendorInfo({ value: MsgUpdateVendorInfo.fromPartial(value) }) + return await signingClient.signAndBroadcast(address, [msg], fee ? fee : defaultFee, memo) + } catch (e: any) { + throw new Error('TxClient:sendMsgUpdateVendorInfo: Could not broadcast Tx: '+ e.message) + } + }, + + + msgCreateVendorInfo({ value }: msgCreateVendorInfoParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.vendorinfo.MsgCreateVendorInfo", value: MsgCreateVendorInfo.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgCreateVendorInfo: Could not create message: ' + e.message) + } + }, + + msgUpdateVendorInfo({ value }: msgUpdateVendorInfoParams): EncodeObject { + try { + return { typeUrl: "/zigbeealliance.distributedcomplianceledger.vendorinfo.MsgUpdateVendorInfo", value: MsgUpdateVendorInfo.fromPartial( value ) } + } catch (e: any) { + throw new Error('TxClient:MsgUpdateVendorInfo: Could not create message: ' + e.message) + } + }, + + } +}; + +interface QueryClientOptions { + addr: string +} + +export const queryClient = ({ addr: addr }: QueryClientOptions = { addr: "http://localhost:1317" }) => { + return new Api({ baseURL: addr }); +}; + +class SDKModule { + public query: ReturnType; + public tx: ReturnType; + public structure: Record; + public registry: Array<[string, GeneratedType]> = []; + + constructor(client: IgniteClient) { + + this.query = queryClient({ addr: client.env.apiURL }); + this.updateTX(client); + this.structure = { + VendorInfo: getStructure(typeVendorInfo.fromPartial({})), + + }; + client.on('signer-changed',(signer) => { + this.updateTX(client); + }) + } + updateTX(client: IgniteClient) { + const methods = txClient({ + signer: client.signer, + addr: client.env.rpcURL, + prefix: 
client.env.prefix ?? "cosmos", + }) + + this.tx = methods; + for (let m in methods) { + this.tx[m] = methods[m].bind(this.tx); + } + } +}; + +const Module = (test: IgniteClient) => { + return { + module: { + ZigbeeallianceDistributedcomplianceledgerVendorinfo: new SDKModule(test) + }, + registry: msgTypes + } +} +export default Module; \ No newline at end of file diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/registry.ts b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/registry.ts new file mode 100755 index 000000000..523c72b23 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/registry.ts @@ -0,0 +1,11 @@ +import { GeneratedType } from "@cosmjs/proto-signing"; +import { MsgCreateVendorInfo } from "./types/zigbeealliance/distributedcomplianceledger/vendorinfo/tx"; +import { MsgUpdateVendorInfo } from "./types/zigbeealliance/distributedcomplianceledger/vendorinfo/tx"; + +const msgTypes: Array<[string, GeneratedType]> = [ + ["/zigbeealliance.distributedcomplianceledger.vendorinfo.MsgCreateVendorInfo", MsgCreateVendorInfo], + ["/zigbeealliance.distributedcomplianceledger.vendorinfo.MsgUpdateVendorInfo", MsgUpdateVendorInfo], + +]; + +export { msgTypes } \ No newline at end of file diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/rest.ts b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/rest.ts new file mode 100644 index 000000000..35a612c4f --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/rest.ts @@ -0,0 +1,297 @@ +/* eslint-disable */ +/* tslint:disable */ +/* + * --------------------------------------------------------------- + * ## THIS FILE WAS GENERATED VIA SWAGGER-TYPESCRIPT-API ## + * ## ## + * ## AUTHOR: acacode ## + * ## SOURCE: https://github.com/acacode/swagger-typescript-api ## + * --------------------------------------------------------------- + */ + +export interface 
DistributedcomplianceledgervendorinfoVendorInfo { + /** @format int32 */ + vendorID?: number; + vendorName?: string; + companyLegalName?: string; + companyPreferredName?: string; + vendorLandingPageURL?: string; + creator?: string; + + /** @format int64 */ + schemaVersion?: number; +} + +export interface ProtobufAny { + "@type"?: string; +} + +export interface RpcStatus { + /** @format int32 */ + code?: number; + message?: string; + details?: ProtobufAny[]; +} + +/** +* message SomeRequest { + Foo some_parameter = 1; + PageRequest pagination = 2; + } +*/ +export interface V1Beta1PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. + * @format byte + */ + key?: string; + + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. + * @format uint64 + */ + offset?: string; + + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. + * @format uint64 + */ + limit?: string; + + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. + */ + count_total?: boolean; + + /** + * reverse is set to true if results are to be returned in the descending order. + * + * Since: cosmos-sdk 0.43 + */ + reverse?: boolean; +} + +/** +* PageResponse is to be embedded in gRPC response messages where the +corresponding request message has used PageRequest. + + message SomeResponse { + repeated Bar results = 1; + PageResponse page = 2; + } +*/ +export interface V1Beta1PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. 
It will be empty if + * there are no more results. + * @format byte + */ + next_key?: string; + + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + * @format uint64 + */ + total?: string; +} + +export type VendorinfoMsgCreateVendorInfoResponse = object; + +export type VendorinfoMsgUpdateVendorInfoResponse = object; + +export interface VendorinfoQueryAllVendorInfoResponse { + vendorInfo?: DistributedcomplianceledgervendorinfoVendorInfo[]; + + /** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ + pagination?: V1Beta1PageResponse; +} + +export interface VendorinfoQueryGetVendorInfoResponse { + vendorInfo?: DistributedcomplianceledgervendorinfoVendorInfo; +} + +import axios, { AxiosInstance, AxiosRequestConfig, AxiosResponse, ResponseType } from "axios"; + +export type QueryParamsType = Record; + +export interface FullRequestParams extends Omit { + /** set parameter to `true` for call `securityWorker` for this request */ + secure?: boolean; + /** request path */ + path: string; + /** content type of request body */ + type?: ContentType; + /** query params */ + query?: QueryParamsType; + /** format of response (i.e. 
response.json() -> format: "json") */ + format?: ResponseType; + /** request body */ + body?: unknown; +} + +export type RequestParams = Omit; + +export interface ApiConfig extends Omit { + securityWorker?: ( + securityData: SecurityDataType | null, + ) => Promise | AxiosRequestConfig | void; + secure?: boolean; + format?: ResponseType; +} + +export enum ContentType { + Json = "application/json", + FormData = "multipart/form-data", + UrlEncoded = "application/x-www-form-urlencoded", +} + +export class HttpClient { + public instance: AxiosInstance; + private securityData: SecurityDataType | null = null; + private securityWorker?: ApiConfig["securityWorker"]; + private secure?: boolean; + private format?: ResponseType; + + constructor({ securityWorker, secure, format, ...axiosConfig }: ApiConfig = {}) { + this.instance = axios.create({ ...axiosConfig, baseURL: axiosConfig.baseURL || "" }); + this.secure = secure; + this.format = format; + this.securityWorker = securityWorker; + } + + public setSecurityData = (data: SecurityDataType | null) => { + this.securityData = data; + }; + + private mergeRequestParams(params1: AxiosRequestConfig, params2?: AxiosRequestConfig): AxiosRequestConfig { + return { + ...this.instance.defaults, + ...params1, + ...(params2 || {}), + headers: { + ...(this.instance.defaults.headers || {}), + ...(params1.headers || {}), + ...((params2 && params2.headers) || {}), + }, + }; + } + + private createFormData(input: Record): FormData { + return Object.keys(input || {}).reduce((formData, key) => { + const property = input[key]; + formData.append( + key, + property instanceof Blob + ? property + : typeof property === "object" && property !== null + ? JSON.stringify(property) + : `${property}`, + ); + return formData; + }, new FormData()); + } + + public request = async ({ + secure, + path, + type, + query, + format, + body, + ...params + }: FullRequestParams): Promise> => { + const secureParams = + ((typeof secure === "boolean" ? 
secure : this.secure) && + this.securityWorker && + (await this.securityWorker(this.securityData))) || + {}; + const requestParams = this.mergeRequestParams(params, secureParams); + const responseFormat = (format && this.format) || void 0; + + if (type === ContentType.FormData && body && body !== null && typeof body === "object") { + requestParams.headers.common = { Accept: "*/*" }; + requestParams.headers.post = {}; + requestParams.headers.put = {}; + + body = this.createFormData(body as Record); + } + + return this.instance.request({ + ...requestParams, + headers: { + ...(type && type !== ContentType.FormData ? { "Content-Type": type } : {}), + ...(requestParams.headers || {}), + }, + params: query, + responseType: responseFormat, + data: body, + url: path, + }); + }; +} + +/** + * @title zigbeealliance/distributedcomplianceledger/vendorinfo/genesis.proto + * @version version not set + */ +export class Api extends HttpClient { + /** + * No description + * + * @tags Query + * @name QueryVendorInfoAll + * @summary Queries a list of vendorInfo items. + * @request GET:/dcl/vendorinfo/vendors + */ + queryVendorInfoAll = ( + query?: { + "pagination.key"?: string; + "pagination.offset"?: string; + "pagination.limit"?: string; + "pagination.count_total"?: boolean; + "pagination.reverse"?: boolean; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/dcl/vendorinfo/vendors`, + method: "GET", + query: query, + format: "json", + ...params, + }); + + /** + * No description + * + * @tags Query + * @name QueryVendorInfo + * @summary Queries a vendorInfo by index. 
+ * @request GET:/dcl/vendorinfo/vendors/{vendorID} + */ + queryVendorInfo = (vendorId: number, params: RequestParams = {}) => + this.request({ + path: `/dcl/vendorinfo/vendors/${vendorId}`, + method: "GET", + format: "json", + ...params, + }); +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types.ts b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types.ts new file mode 100755 index 000000000..986e352ac --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types.ts @@ -0,0 +1,7 @@ +import { VendorInfo } from "./types/zigbeealliance/distributedcomplianceledger/vendorinfo/vendor_info" + + +export { + VendorInfo, + + } \ No newline at end of file diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/cosmos/base/query/v1beta1/pagination.ts b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/cosmos/base/query/v1beta1/pagination.ts new file mode 100644 index 000000000..a31ca249d --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/cosmos/base/query/v1beta1/pagination.ts @@ -0,0 +1,286 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos.base.query.v1beta1"; + +/** + * PageRequest is to be embedded in gRPC request messages for efficient + * pagination. Ex: + * + * message SomeRequest { + * Foo some_parameter = 1; + * PageRequest pagination = 2; + * } + */ +export interface PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. + */ + key: Uint8Array; + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. + */ + offset: number; + /** + * limit is the total number of results to be returned in the result page. 
+ * If left empty it will default to a value to be set by each app. + */ + limit: number; + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. + */ + countTotal: boolean; + /** + * reverse is set to true if results are to be returned in the descending order. + * + * Since: cosmos-sdk 0.43 + */ + reverse: boolean; +} + +/** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ +export interface PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. + */ + nextKey: Uint8Array; + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + */ + total: number; +} + +function createBasePageRequest(): PageRequest { + return { key: new Uint8Array(), offset: 0, limit: 0, countTotal: false, reverse: false }; +} + +export const PageRequest = { + encode(message: PageRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + if (message.offset !== 0) { + writer.uint32(16).uint64(message.offset); + } + if (message.limit !== 0) { + writer.uint32(24).uint64(message.limit); + } + if (message.countTotal === true) { + writer.uint32(32).bool(message.countTotal); + } + if (message.reverse === true) { + writer.uint32(40).bool(message.reverse); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PageRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePageRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + case 2: + message.offset = longToNumber(reader.uint64() as Long); + break; + case 3: + message.limit = longToNumber(reader.uint64() as Long); + break; + case 4: + message.countTotal = reader.bool(); + break; + case 5: + message.reverse = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PageRequest { + return { + key: isSet(object.key) ? bytesFromBase64(object.key) : new Uint8Array(), + offset: isSet(object.offset) ? Number(object.offset) : 0, + limit: isSet(object.limit) ? Number(object.limit) : 0, + countTotal: isSet(object.countTotal) ? Boolean(object.countTotal) : false, + reverse: isSet(object.reverse) ? Boolean(object.reverse) : false, + }; + }, + + toJSON(message: PageRequest): unknown { + const obj: any = {}; + message.key !== undefined + && (obj.key = base64FromBytes(message.key !== undefined ? message.key : new Uint8Array())); + message.offset !== undefined && (obj.offset = Math.round(message.offset)); + message.limit !== undefined && (obj.limit = Math.round(message.limit)); + message.countTotal !== undefined && (obj.countTotal = message.countTotal); + message.reverse !== undefined && (obj.reverse = message.reverse); + return obj; + }, + + fromPartial, I>>(object: I): PageRequest { + const message = createBasePageRequest(); + message.key = object.key ?? new Uint8Array(); + message.offset = object.offset ?? 0; + message.limit = object.limit ?? 0; + message.countTotal = object.countTotal ?? false; + message.reverse = object.reverse ?? 
false; + return message; + }, +}; + +function createBasePageResponse(): PageResponse { + return { nextKey: new Uint8Array(), total: 0 }; +} + +export const PageResponse = { + encode(message: PageResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.nextKey.length !== 0) { + writer.uint32(10).bytes(message.nextKey); + } + if (message.total !== 0) { + writer.uint32(16).uint64(message.total); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PageResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePageResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.nextKey = reader.bytes(); + break; + case 2: + message.total = longToNumber(reader.uint64() as Long); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): PageResponse { + return { + nextKey: isSet(object.nextKey) ? bytesFromBase64(object.nextKey) : new Uint8Array(), + total: isSet(object.total) ? Number(object.total) : 0, + }; + }, + + toJSON(message: PageResponse): unknown { + const obj: any = {}; + message.nextKey !== undefined + && (obj.nextKey = base64FromBytes(message.nextKey !== undefined ? message.nextKey : new Uint8Array())); + message.total !== undefined && (obj.total = Math.round(message.total)); + return obj; + }, + + fromPartial, I>>(object: I): PageResponse { + const message = createBasePageResponse(); + message.nextKey = object.nextKey ?? new Uint8Array(); + message.total = object.total ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/cosmos_proto/cosmos.ts b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/cosmos_proto/cosmos.ts new file mode 100644 index 000000000..79c8d3486 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/cosmos_proto/cosmos.ts @@ -0,0 +1,247 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "cosmos_proto"; + +export enum ScalarType { + SCALAR_TYPE_UNSPECIFIED = 0, + SCALAR_TYPE_STRING = 1, + SCALAR_TYPE_BYTES = 2, + UNRECOGNIZED = -1, +} + +export function scalarTypeFromJSON(object: any): ScalarType { + switch (object) { + case 0: + case "SCALAR_TYPE_UNSPECIFIED": + return ScalarType.SCALAR_TYPE_UNSPECIFIED; + case 1: + case "SCALAR_TYPE_STRING": + return ScalarType.SCALAR_TYPE_STRING; + case 2: + case "SCALAR_TYPE_BYTES": + return ScalarType.SCALAR_TYPE_BYTES; + case -1: + case "UNRECOGNIZED": + default: + return ScalarType.UNRECOGNIZED; + } +} + +export function scalarTypeToJSON(object: ScalarType): string { + switch (object) { + case ScalarType.SCALAR_TYPE_UNSPECIFIED: + return "SCALAR_TYPE_UNSPECIFIED"; + case ScalarType.SCALAR_TYPE_STRING: + return "SCALAR_TYPE_STRING"; + case ScalarType.SCALAR_TYPE_BYTES: + return "SCALAR_TYPE_BYTES"; + case ScalarType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * InterfaceDescriptor describes an interface type to be used with + * accepts_interface and implements_interface and declared by 
declare_interface. + */ +export interface InterfaceDescriptor { + /** + * name is the name of the interface. It should be a short-name (without + * a period) such that the fully qualified name of the interface will be + * package.name, ex. for the package a.b and interface named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the interface and its + * purpose. + */ + description: string; +} + +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. + * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. + * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ +export interface ScalarDescriptor { + /** + * name is the name of the scalar. It should be a short-name (without + * a period) such that the fully qualified name of the scalar will be + * package.name, ex. for the package a.b and scalar named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the scalar and its + * encoding format. For instance a big integer or decimal scalar should + * specify precisely the expected encoding format. + */ + description: string; + /** + * field_type is the type of field with which this scalar can be used. + * Scalars can be used with one and only one type of field so that + * encoding standards and simple and clear. Currently only string and + * bytes fields are supported for scalars. 
+ */ + fieldType: ScalarType[]; +} + +function createBaseInterfaceDescriptor(): InterfaceDescriptor { + return { name: "", description: "" }; +} + +export const InterfaceDescriptor = { + encode(message: InterfaceDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseInterfaceDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): InterfaceDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + }; + }, + + toJSON(message: InterfaceDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + return obj; + }, + + fromPartial, I>>(object: I): InterfaceDescriptor { + const message = createBaseInterfaceDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? 
""; + return message; + }, +}; + +function createBaseScalarDescriptor(): ScalarDescriptor { + return { name: "", description: "", fieldType: [] }; +} + +export const ScalarDescriptor = { + encode(message: ScalarDescriptor, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.description !== "") { + writer.uint32(18).string(message.description); + } + writer.uint32(26).fork(); + for (const v of message.fieldType) { + writer.int32(v); + } + writer.ldelim(); + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ScalarDescriptor { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseScalarDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.fieldType.push(reader.int32() as any); + } + } else { + message.fieldType.push(reader.int32() as any); + } + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ScalarDescriptor { + return { + name: isSet(object.name) ? String(object.name) : "", + description: isSet(object.description) ? String(object.description) : "", + fieldType: Array.isArray(object?.fieldType) ? 
object.fieldType.map((e: any) => scalarTypeFromJSON(e)) : [], + }; + }, + + toJSON(message: ScalarDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && (obj.description = message.description); + if (message.fieldType) { + obj.fieldType = message.fieldType.map((e) => scalarTypeToJSON(e)); + } else { + obj.fieldType = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ScalarDescriptor { + const message = createBaseScalarDescriptor(); + message.name = object.name ?? ""; + message.description = object.description ?? ""; + message.fieldType = object.fieldType?.map((e) => e) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/gogoproto/gogo.ts b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/gogoproto/gogo.ts new file mode 100644 index 000000000..3f41a047a --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/gogoproto/gogo.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "gogoproto"; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/google/api/annotations.ts b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/google/api/annotations.ts new file mode 100644 index 000000000..aace4787f --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/google/api/annotations.ts @@ -0,0 +1,2 @@ +/* eslint-disable */ +export const protobufPackage = "google.api"; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/google/api/http.ts b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/google/api/http.ts new file mode 100644 index 000000000..17e770d15 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/google/api/http.ts @@ -0,0 +1,589 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.api"; + +/** + * Defines the HTTP configuration for an API service. It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface Http { + /** + * A list of HTTP configuration rules that apply to individual API methods. + * + * **NOTE:** All service configuration rules follow "last one wins" order. 
+ */ + rules: HttpRule[]; + /** + * When set to true, URL path parmeters will be fully URI-decoded except in + * cases of single segment matches in reserved expansion, where "%2F" will be + * left encoded. + * + * The default behavior is to not decode RFC 6570 reserved characters in multi + * segment matches. + */ + fullyDecodeReservedExpansion: boolean; +} + +/** + * `HttpRule` defines the mapping of an RPC method to one or more HTTP + * REST API methods. The mapping specifies how different portions of the RPC + * request message are mapped to URL path, URL query parameters, and + * HTTP request body. The mapping is typically specified as an + * `google.api.http` annotation on the RPC method, + * see "google/api/annotations.proto" for details. + * + * The mapping consists of a field specifying the path template and + * method kind. The path template can refer to fields in the request + * message, as in the example below which describes a REST GET + * operation on a resource collection of messages: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}/{sub.subfield}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * SubMessage sub = 2; // `sub.subfield` is url-mapped + * } + * message Message { + * string text = 1; // content of the resource + * } + * + * The same http annotation can alternatively be expressed inside the + * `GRPC API Configuration` YAML file. + * + * http: + * rules: + * - selector: .Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * This definition enables an automatic, bidrectional mapping of HTTP + * JSON to RPC. 
Example: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456/foo` | `GetMessage(message_id: "123456" sub: SubMessage(subfield: "foo"))` + * + * In general, not only fields but also field paths can be referenced + * from a path pattern. Fields mapped to the path pattern cannot be + * repeated and must have a primitive (non-message) type. + * + * Any fields in the request message which are not bound by the path + * pattern automatically become (optional) HTTP query + * parameters. Assume the following definition of the request message: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http).get = "/v1/messages/{message_id}"; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // mapped to the URL + * int64 revision = 2; // becomes a parameter + * SubMessage sub = 3; // `sub.subfield` becomes a parameter + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456?revision=2&sub.subfield=foo` | `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: "foo"))` + * + * Note that fields which are mapped to HTTP parameters must have a + * primitive type or a repeated primitive type. Message types are not + * allowed. In the case of a repeated type, the parameter can be + * repeated in the URL, as in `...?param=A¶m=B`. + * + * For HTTP method kinds which allow a request body, the `body` field + * specifies the mapping. 
Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * put: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * The following HTTP JSON to RPC mapping is enabled: + * + * HTTP | RPC + * -----|----- + * `PUT /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice of + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. 
Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC + * mappings: + * + * HTTP | RPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` + * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: "123456")` + * + * # Rules for HTTP mapping + * + * The rules for mapping HTTP path, query parameters, and body fields + * to the request message are as follows: + * + * 1. The `body` field specifies either `*` or a field path, or is + * omitted. If omitted, it indicates there is no HTTP request body. + * 2. Leaf fields (recursive expansion of nested messages in the + * request) can be classified into three types: + * (a) Matched in the URL template. + * (b) Covered by body (if body is `*`, everything except (a) fields; + * else everything under the body field) + * (c) All other fields. + * 3. URL query parameters found in the HTTP request are mapped to (c) fields. + * 4. Any body sent with an HTTP request can contain only (b) fields. + * + * The syntax of the path template is as follows: + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single path segment. The syntax `**` matches zero + * or more path segments, which must be the last part of the path except the + * `Verb`. The syntax `LITERAL` matches literal text in the path. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. 
A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path, all characters + * except `[-_.~0-9a-zA-Z]` are percent-encoded. Such variables show up in the + * Discovery Document as `{var}`. + * + * If a variable contains one or more path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path, all + * characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. Such variables + * show up in the Discovery Document as `{+var}`. + * + * NOTE: While the single segment variable matches the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 + * Simple String Expansion, the multi segment variable **does not** match + * RFC 6570 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. + * + * NOTE: the field paths in variables and in the `body` must not refer to + * repeated fields or map fields. + */ +export interface HttpRule { + /** + * Selects methods to which this rule applies. + * + * Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + */ + selector: string; + /** Used for listing and getting information about resources. */ + get: + | string + | undefined; + /** Used for updating a resource. */ + put: + | string + | undefined; + /** Used for creating a resource. */ + post: + | string + | undefined; + /** Used for deleting a resource. */ + delete: + | string + | undefined; + /** Used for updating a resource. 
*/ + patch: + | string + | undefined; + /** + * The custom pattern is used for specifying an HTTP method that is not + * included in the `pattern` field, such as HEAD, or "*" to leave the + * HTTP method unspecified for this rule. The wild-card rule is useful + * for services that provide content to Web (HTML) clients. + */ + custom: + | CustomHttpPattern + | undefined; + /** + * The name of the request field whose value is mapped to the HTTP body, or + * `*` for mapping all fields not captured by the path pattern to the HTTP + * body. NOTE: the referred field must not be a repeated field and must be + * present at the top-level of request message type. + */ + body: string; + /** + * Optional. The name of the response field whose value is mapped to the HTTP + * body of response. Other response fields are ignored. When + * not set, the response message will be used as HTTP body of response. + */ + responseBody: string; + /** + * Additional HTTP bindings for the selector. Nested bindings must + * not contain an `additional_bindings` field themselves (that is, + * the nesting may only be one level deep). + */ + additionalBindings: HttpRule[]; +} + +/** A custom pattern is used for defining custom HTTP verb. */ +export interface CustomHttpPattern { + /** The name of this custom HTTP verb. */ + kind: string; + /** The path matched by this custom verb. */ + path: string; +} + +function createBaseHttp(): Http { + return { rules: [], fullyDecodeReservedExpansion: false }; +} + +export const Http = { + encode(message: Http, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.rules) { + HttpRule.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.fullyDecodeReservedExpansion === true) { + writer.uint32(16).bool(message.fullyDecodeReservedExpansion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): Http { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rules.push(HttpRule.decode(reader, reader.uint32())); + break; + case 2: + message.fullyDecodeReservedExpansion = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): Http { + return { + rules: Array.isArray(object?.rules) ? object.rules.map((e: any) => HttpRule.fromJSON(e)) : [], + fullyDecodeReservedExpansion: isSet(object.fullyDecodeReservedExpansion) + ? Boolean(object.fullyDecodeReservedExpansion) + : false, + }; + }, + + toJSON(message: Http): unknown { + const obj: any = {}; + if (message.rules) { + obj.rules = message.rules.map((e) => e ? HttpRule.toJSON(e) : undefined); + } else { + obj.rules = []; + } + message.fullyDecodeReservedExpansion !== undefined + && (obj.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion); + return obj; + }, + + fromPartial, I>>(object: I): Http { + const message = createBaseHttp(); + message.rules = object.rules?.map((e) => HttpRule.fromPartial(e)) || []; + message.fullyDecodeReservedExpansion = object.fullyDecodeReservedExpansion ?? 
false; + return message; + }, +}; + +function createBaseHttpRule(): HttpRule { + return { + selector: "", + get: undefined, + put: undefined, + post: undefined, + delete: undefined, + patch: undefined, + custom: undefined, + body: "", + responseBody: "", + additionalBindings: [], + }; +} + +export const HttpRule = { + encode(message: HttpRule, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.selector !== "") { + writer.uint32(10).string(message.selector); + } + if (message.get !== undefined) { + writer.uint32(18).string(message.get); + } + if (message.put !== undefined) { + writer.uint32(26).string(message.put); + } + if (message.post !== undefined) { + writer.uint32(34).string(message.post); + } + if (message.delete !== undefined) { + writer.uint32(42).string(message.delete); + } + if (message.patch !== undefined) { + writer.uint32(50).string(message.patch); + } + if (message.custom !== undefined) { + CustomHttpPattern.encode(message.custom, writer.uint32(66).fork()).ldelim(); + } + if (message.body !== "") { + writer.uint32(58).string(message.body); + } + if (message.responseBody !== "") { + writer.uint32(98).string(message.responseBody); + } + for (const v of message.additionalBindings) { + HttpRule.encode(v!, writer.uint32(90).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): HttpRule { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseHttpRule(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.selector = reader.string(); + break; + case 2: + message.get = reader.string(); + break; + case 3: + message.put = reader.string(); + break; + case 4: + message.post = reader.string(); + break; + case 5: + message.delete = reader.string(); + break; + case 6: + message.patch = reader.string(); + break; + case 8: + message.custom = CustomHttpPattern.decode(reader, reader.uint32()); + break; + case 7: + message.body = reader.string(); + break; + case 12: + message.responseBody = reader.string(); + break; + case 11: + message.additionalBindings.push(HttpRule.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): HttpRule { + return { + selector: isSet(object.selector) ? String(object.selector) : "", + get: isSet(object.get) ? String(object.get) : undefined, + put: isSet(object.put) ? String(object.put) : undefined, + post: isSet(object.post) ? String(object.post) : undefined, + delete: isSet(object.delete) ? String(object.delete) : undefined, + patch: isSet(object.patch) ? String(object.patch) : undefined, + custom: isSet(object.custom) ? CustomHttpPattern.fromJSON(object.custom) : undefined, + body: isSet(object.body) ? String(object.body) : "", + responseBody: isSet(object.responseBody) ? String(object.responseBody) : "", + additionalBindings: Array.isArray(object?.additionalBindings) + ? 
object.additionalBindings.map((e: any) => HttpRule.fromJSON(e)) + : [], + }; + }, + + toJSON(message: HttpRule): unknown { + const obj: any = {}; + message.selector !== undefined && (obj.selector = message.selector); + message.get !== undefined && (obj.get = message.get); + message.put !== undefined && (obj.put = message.put); + message.post !== undefined && (obj.post = message.post); + message.delete !== undefined && (obj.delete = message.delete); + message.patch !== undefined && (obj.patch = message.patch); + message.custom !== undefined + && (obj.custom = message.custom ? CustomHttpPattern.toJSON(message.custom) : undefined); + message.body !== undefined && (obj.body = message.body); + message.responseBody !== undefined && (obj.responseBody = message.responseBody); + if (message.additionalBindings) { + obj.additionalBindings = message.additionalBindings.map((e) => e ? HttpRule.toJSON(e) : undefined); + } else { + obj.additionalBindings = []; + } + return obj; + }, + + fromPartial, I>>(object: I): HttpRule { + const message = createBaseHttpRule(); + message.selector = object.selector ?? ""; + message.get = object.get ?? undefined; + message.put = object.put ?? undefined; + message.post = object.post ?? undefined; + message.delete = object.delete ?? undefined; + message.patch = object.patch ?? undefined; + message.custom = (object.custom !== undefined && object.custom !== null) + ? CustomHttpPattern.fromPartial(object.custom) + : undefined; + message.body = object.body ?? ""; + message.responseBody = object.responseBody ?? 
""; + message.additionalBindings = object.additionalBindings?.map((e) => HttpRule.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseCustomHttpPattern(): CustomHttpPattern { + return { kind: "", path: "" }; +} + +export const CustomHttpPattern = { + encode(message: CustomHttpPattern, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.kind !== "") { + writer.uint32(10).string(message.kind); + } + if (message.path !== "") { + writer.uint32(18).string(message.path); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): CustomHttpPattern { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCustomHttpPattern(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.kind = reader.string(); + break; + case 2: + message.path = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): CustomHttpPattern { + return { kind: isSet(object.kind) ? String(object.kind) : "", path: isSet(object.path) ? String(object.path) : "" }; + }, + + toJSON(message: CustomHttpPattern): unknown { + const obj: any = {}; + message.kind !== undefined && (obj.kind = message.kind); + message.path !== undefined && (obj.path = message.path); + return obj; + }, + + fromPartial, I>>(object: I): CustomHttpPattern { + const message = createBaseCustomHttpPattern(); + message.kind = object.kind ?? ""; + message.path = object.path ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? 
keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/google/protobuf/descriptor.ts b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/google/protobuf/descriptor.ts new file mode 100644 index 000000000..8fb7bb24c --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/google/protobuf/descriptor.ts @@ -0,0 +1,3753 @@ +/* eslint-disable */ +import Long from "long"; +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "google.protobuf"; + +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSet { + file: FileDescriptorProto[]; +} + +/** Describes a complete .proto file. */ +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name: string; + /** e.g. "foo", "foo.bar", etc. */ + package: string; + /** Names of files imported by this file. */ + dependency: string[]; + /** Indexes of the public imported files in the dependency list above. */ + publicDependency: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weakDependency: number[]; + /** All top-level definitions in this file. */ + messageType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + service: ServiceDescriptorProto[]; + extension: FieldDescriptorProto[]; + options: + | FileOptions + | undefined; + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. 
+ */ + sourceCodeInfo: + | SourceCodeInfo + | undefined; + /** + * The syntax of the proto file. + * The supported values are "proto2" and "proto3". + */ + syntax: string; +} + +/** Describes a message type. */ +export interface DescriptorProto { + name: string; + field: FieldDescriptorProto[]; + extension: FieldDescriptorProto[]; + nestedType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + extensionRange: DescriptorProto_ExtensionRange[]; + oneofDecl: OneofDescriptorProto[]; + options: MessageOptions | undefined; + reservedRange: DescriptorProto_ReservedRange[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reservedName: string[]; +} + +export interface DescriptorProto_ExtensionRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; + options: ExtensionRangeOptions | undefined; +} + +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; +} + +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Describes a field within a message. */ +export interface FieldDescriptorProto { + name: string; + number: number; + label: FieldDescriptorProto_Label; + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type: FieldDescriptorProto_Type; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. 
first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). + */ + typeName: string; + /** + * For extensions, this is the name of the type being extended. It is + * resolved in the same manner as type_name. + */ + extendee: string; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + * TODO(kenton): Base-64 encode? + */ + defaultValue: string; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneofIndex: number; + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + jsonName: string; + options: + | FieldOptions + | undefined; + /** + * If true, this is a proto3 "optional". When a proto3 field is optional, it + * tracks presence regardless of field type. + * + * When proto3_optional is true, this field must be belong to a oneof to + * signal to old proto3 clients that presence is tracked for this field. This + * oneof is known as a "synthetic" oneof, and this field must be its sole + * member (each proto3 optional field gets its own synthetic oneof). Synthetic + * oneofs exist in the descriptor only, and do not generate any API. Synthetic + * oneofs must be ordered after all "real" oneofs. + * + * For message fields, proto3_optional doesn't create any semantic change, + * since non-repeated message fields always track presence. However it still + * indicates the semantic detail of whether the user wrote "optional" or not. + * This can be useful for round-tripping the .proto file. 
For consistency we + * give message fields a synthetic oneof also, even though it is not required + * to track presence. This is especially important because the parser can't + * tell if a field is a message or an enum, so it must always create a + * synthetic oneof. + * + * Proto2 optional fields do not set this flag, because they already indicate + * optional with `LABEL_OPTIONAL`. + */ + proto3Optional: boolean; +} + +export enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + TYPE_GROUP = 10, + /** TYPE_MESSAGE - Length-delimited aggregate. */ + TYPE_MESSAGE = 11, + /** TYPE_BYTES - New in version 2. */ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + /** TYPE_SINT64 - Uses ZigZag encoding. 
*/ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + return FieldDescriptorProto_Type.TYPE_SINT64; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Type.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case 
FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return "TYPE_SFIXED32"; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return "TYPE_SFIXED64"; + case FieldDescriptorProto_Type.TYPE_SINT32: + return "TYPE_SINT32"; + case FieldDescriptorProto_Type.TYPE_SINT64: + return "TYPE_SINT64"; + case FieldDescriptorProto_Type.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3, + UNRECOGNIZED = -1, +} + +export function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label { + switch (object) { + case 1: + case "LABEL_OPTIONAL": + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 2: + case "LABEL_REQUIRED": + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case 3: + case "LABEL_REPEATED": + return FieldDescriptorProto_Label.LABEL_REPEATED; + case -1: + case "UNRECOGNIZED": + default: + return FieldDescriptorProto_Label.UNRECOGNIZED; + } +} + +export function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string { + switch (object) { + case 
FieldDescriptorProto_Label.LABEL_OPTIONAL: + return "LABEL_OPTIONAL"; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return "LABEL_REQUIRED"; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return "LABEL_REPEATED"; + case FieldDescriptorProto_Label.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** Describes a oneof. */ +export interface OneofDescriptorProto { + name: string; + options: OneofOptions | undefined; +} + +/** Describes an enum type. */ +export interface EnumDescriptorProto { + name: string; + value: EnumValueDescriptorProto[]; + options: + | EnumOptions + | undefined; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reservedRange: EnumDescriptorProto_EnumReservedRange[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reservedName: string[]; +} + +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start: number; + /** Inclusive. */ + end: number; +} + +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProto { + name: string; + number: number; + options: EnumValueOptions | undefined; +} + +/** Describes a service. */ +export interface ServiceDescriptorProto { + name: string; + method: MethodDescriptorProto[]; + options: ServiceOptions | undefined; +} + +/** Describes a method of a service. */ +export interface MethodDescriptorProto { + name: string; + /** + * Input and output type names. 
These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + inputType: string; + outputType: string; + options: + | MethodOptions + | undefined; + /** Identifies if client streams multiple client messages */ + clientStreaming: boolean; + /** Identifies if server streams multiple server messages */ + serverStreaming: boolean; +} + +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage: string; + /** + * Controls the name of the wrapper Java class generated for the .proto file. + * That class will always contain the .proto file's getDescriptor() method as + * well as any top-level extensions defined in the .proto file. + * If java_multiple_files is disabled, then all the other classes from the + * .proto file will be nested inside the single wrapper outer class. + */ + javaOuterClassname: string; + /** + * If enabled, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the wrapper class + * named by java_outer_classname. However, the wrapper class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + javaMultipleFiles: boolean; + /** + * This option does nothing. + * + * @deprecated + */ + javaGenerateEqualsAndHash: boolean; + /** + * If set true, then the Java2 code generator will generate code that + * throws an exception whenever an attempt is made to assign a non-UTF-8 + * byte sequence to a string field. + * Message reflection will do the same. + * However, an extension field still accepts non-UTF-8 byte sequences. 
+ * This option has no effect on when used with the lite runtime. + */ + javaStringCheckUtf8: boolean; + optimizeFor: FileOptions_OptimizeMode; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + goPackage: string; + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + ccGenericServices: boolean; + javaGenericServices: boolean; + pyGenericServices: boolean; + phpGenericServices: boolean; + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated: boolean; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + ccEnableArenas: boolean; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. + */ + objcClassPrefix: string; + /** Namespace for generated classes; defaults to the package. 
*/ + csharpNamespace: string; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swiftPrefix: string; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + phpClassPrefix: string; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. When this option is empty, the package name will be used for + * determining the namespace. + */ + phpNamespace: string; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + phpMetadataNamespace: string; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + rubyPackage: string; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpretedOption: UninterpretedOption[]; +} + +/** Generated classes can be optimized for speed or code size. */ +export enum FileOptions_OptimizeMode { + /** SPEED - Generate complete code for parsing, serialization, */ + SPEED = 1, + /** CODE_SIZE - etc. */ + CODE_SIZE = 2, + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. 
*/ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1, +} + +export function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + case -1: + case "UNRECOGNIZED": + default: + return FileOptions_OptimizeMode.UNRECOGNIZED; + } +} + +export function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + case FileOptions_OptimizeMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + messageSetWireFormat: boolean; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". 
+ */ + noStandardDescriptorAccessor: boolean; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated: boolean; + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + mapEntry: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface FieldOptions { + /** + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is not yet implemented in the open source + * release -- sorry, we'll try to include it in a future version! + */ + ctype: FieldOptions_CType; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. 
+ */ + packed: boolean; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. + * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype: FieldOptions_JSType; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * Note that implementations may choose not to check required fields within + * a lazy sub-message. That is, calling IsInitialized() on the outer message + * may return true even if the inner message has missing required fields. 
+ * This is necessary because otherwise the inner message would have to be + * parsed in order to perform the check, defeating the purpose of lazy + * parsing. An implementation which chooses not to check required fields + * must be consistent about it. That is, for any particular sub-message, the + * implementation must either *always* check its required fields, or *never* + * check its required fields, regardless of whether or not the message has + * been parsed. + */ + lazy: boolean; + /** + * Is this field deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated: boolean; + /** For Google-internal migration only. Do not use. */ + weak: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export enum FieldOptions_CType { + /** STRING - Default mode. */ + STRING = 0, + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_CType.UNRECOGNIZED; + } +} + +export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + case FieldOptions_CType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. */ + JS_NORMAL = 0, + /** JS_STRING - Use JavaScript strings. 
*/ + JS_STRING = 1, + /** JS_NUMBER - Use JavaScript numbers. */ + JS_NUMBER = 2, + UNRECOGNIZED = -1, +} + +export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + case -1: + case "UNRECOGNIZED": + default: + return FieldOptions_JSType.UNRECOGNIZED; + } +} + +export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + case FieldOptions_JSType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface OneofOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias: boolean; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface EnumValueOptions { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. 
*/ + uninterpretedOption: UninterpretedOption[]; +} + +export interface ServiceOptions { + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated: boolean; + idempotencyLevel: MethodOptions_IdempotencyLevel; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} + +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. 
+ */ +export enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1, +} + +export function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + case -1: + case "UNRECOGNIZED": + default: + return MethodOptions_IdempotencyLevel.UNRECOGNIZED; + } +} + +export function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + case MethodOptions_IdempotencyLevel.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. 
+ */ + identifierValue: string; + positiveIntValue: number; + negativeIntValue: number; + doubleValue: number; + stringValue: Uint8Array; + aggregateValue: string; +} + +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePart { + namePart: string; + isExtension: boolean; +} + +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. 
This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[]; +} + +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition. For + * example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). 
+ */ + path: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. + * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to qux. + * // + * // Another line attached to qux. + * optional double qux = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to qux or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. * / + * /* Block comment attached to + * * grault. 
* / + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leadingComments: string; + trailingComments: string; + leadingDetachedComments: string[]; +} + +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. + */ + annotation: GeneratedCodeInfo_Annotation[]; +} + +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[]; + /** Identifies the filesystem path to the original source .proto. */ + sourceFile: string; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin: number; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified offset. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end: number; +} + +function createBaseFileDescriptorSet(): FileDescriptorSet { + return { file: [] }; +} + +export const FileDescriptorSet = { + encode(message: FileDescriptorSet, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.file) { + FileDescriptorProto.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorSet { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileDescriptorSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.file.push(FileDescriptorProto.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorSet { + return { file: Array.isArray(object?.file) ? object.file.map((e: any) => FileDescriptorProto.fromJSON(e)) : [] }; + }, + + toJSON(message: FileDescriptorSet): unknown { + const obj: any = {}; + if (message.file) { + obj.file = message.file.map((e) => e ? FileDescriptorProto.toJSON(e) : undefined); + } else { + obj.file = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorSet { + const message = createBaseFileDescriptorSet(); + message.file = object.file?.map((e) => FileDescriptorProto.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFileDescriptorProto(): FileDescriptorProto { + return { + name: "", + package: "", + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: "", + }; +} + +export const FileDescriptorProto = { + encode(message: FileDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.package !== "") { + writer.uint32(18).string(message.package); + } + for (const v of message.dependency) { + writer.uint32(26).string(v!); + } + writer.uint32(82).fork(); + for (const v of message.publicDependency) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(90).fork(); + for (const v of message.weakDependency) { + writer.int32(v); + } + writer.ldelim(); + for (const v of message.messageType) { + DescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of 
message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const v of message.service) { + ServiceDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(58).fork()).ldelim(); + } + if (message.options !== undefined) { + FileOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.sourceCodeInfo !== undefined) { + SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(74).fork()).ldelim(); + } + if (message.syntax !== "") { + writer.uint32(98).string(message.syntax); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFileDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.package = reader.string(); + break; + case 3: + message.dependency.push(reader.string()); + break; + case 10: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.publicDependency.push(reader.int32()); + } + } else { + message.publicDependency.push(reader.int32()); + } + break; + case 11: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.weakDependency.push(reader.int32()); + } + } else { + message.weakDependency.push(reader.int32()); + } + break; + case 4: + message.messageType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.service.push(ServiceDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + 
message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 8: + message.options = FileOptions.decode(reader, reader.uint32()); + break; + case 9: + message.sourceCodeInfo = SourceCodeInfo.decode(reader, reader.uint32()); + break; + case 12: + message.syntax = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + package: isSet(object.package) ? String(object.package) : "", + dependency: Array.isArray(object?.dependency) ? object.dependency.map((e: any) => String(e)) : [], + publicDependency: Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e: any) => Number(e)) + : [], + weakDependency: Array.isArray(object?.weakDependency) ? object.weakDependency.map((e: any) => Number(e)) : [], + messageType: Array.isArray(object?.messageType) + ? object.messageType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + service: Array.isArray(object?.service) ? object.service.map((e: any) => ServiceDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) ? SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, + syntax: isSet(object.syntax) ? 
String(object.syntax) : "", + }; + }, + + toJSON(message: FileDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.package !== undefined && (obj.package = message.package); + if (message.dependency) { + obj.dependency = message.dependency.map((e) => e); + } else { + obj.dependency = []; + } + if (message.publicDependency) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } else { + obj.publicDependency = []; + } + if (message.weakDependency) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } else { + obj.weakDependency = []; + } + if (message.messageType) { + obj.messageType = message.messageType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.messageType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.service) { + obj.service = message.service.map((e) => e ? ServiceDescriptorProto.toJSON(e) : undefined); + } else { + obj.service = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + message.options !== undefined && (obj.options = message.options ? FileOptions.toJSON(message.options) : undefined); + message.sourceCodeInfo !== undefined + && (obj.sourceCodeInfo = message.sourceCodeInfo ? SourceCodeInfo.toJSON(message.sourceCodeInfo) : undefined); + message.syntax !== undefined && (obj.syntax = message.syntax); + return obj; + }, + + fromPartial, I>>(object: I): FileDescriptorProto { + const message = createBaseFileDescriptorProto(); + message.name = object.name ?? ""; + message.package = object.package ?? 
""; + message.dependency = object.dependency?.map((e) => e) || []; + message.publicDependency = object.publicDependency?.map((e) => e) || []; + message.weakDependency = object.weakDependency?.map((e) => e) || []; + message.messageType = object.messageType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.service = object.service?.map((e) => ServiceDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? FileOptions.fromPartial(object.options) + : undefined; + message.sourceCodeInfo = (object.sourceCodeInfo !== undefined && object.sourceCodeInfo !== null) + ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) + : undefined; + message.syntax = object.syntax ?? ""; + return message; + }, +}; + +function createBaseDescriptorProto(): DescriptorProto { + return { + name: "", + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + }; +} + +export const DescriptorProto = { + encode(message: DescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.field) { + FieldDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.nestedType) { + DescriptorProto.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.extensionRange) { + DescriptorProto_ExtensionRange.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const 
v of message.oneofDecl) { + OneofDescriptorProto.encode(v!, writer.uint32(66).fork()).ldelim(); + } + if (message.options !== undefined) { + MessageOptions.encode(message.options, writer.uint32(58).fork()).ldelim(); + } + for (const v of message.reservedRange) { + DescriptorProto_ReservedRange.encode(v!, writer.uint32(74).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(82).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.field.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 6: + message.extension.push(FieldDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.nestedType.push(DescriptorProto.decode(reader, reader.uint32())); + break; + case 4: + message.enumType.push(EnumDescriptorProto.decode(reader, reader.uint32())); + break; + case 5: + message.extensionRange.push(DescriptorProto_ExtensionRange.decode(reader, reader.uint32())); + break; + case 8: + message.oneofDecl.push(OneofDescriptorProto.decode(reader, reader.uint32())); + break; + case 7: + message.options = MessageOptions.decode(reader, reader.uint32()); + break; + case 9: + message.reservedRange.push(DescriptorProto_ReservedRange.decode(reader, reader.uint32())); + break; + case 10: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + field: Array.isArray(object?.field) ? 
object.field.map((e: any) => FieldDescriptorProto.fromJSON(e)) : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: Array.isArray(object?.nestedType) + ? object.nestedType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], + extensionRange: Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e: any) => DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e: any) => OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? MessageOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: DescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.field) { + obj.field = message.field.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.field = []; + } + if (message.extension) { + obj.extension = message.extension.map((e) => e ? FieldDescriptorProto.toJSON(e) : undefined); + } else { + obj.extension = []; + } + if (message.nestedType) { + obj.nestedType = message.nestedType.map((e) => e ? DescriptorProto.toJSON(e) : undefined); + } else { + obj.nestedType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map((e) => e ? EnumDescriptorProto.toJSON(e) : undefined); + } else { + obj.enumType = []; + } + if (message.extensionRange) { + obj.extensionRange = message.extensionRange.map((e) => e ? 
DescriptorProto_ExtensionRange.toJSON(e) : undefined); + } else { + obj.extensionRange = []; + } + if (message.oneofDecl) { + obj.oneofDecl = message.oneofDecl.map((e) => e ? OneofDescriptorProto.toJSON(e) : undefined); + } else { + obj.oneofDecl = []; + } + message.options !== undefined + && (obj.options = message.options ? MessageOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => e ? DescriptorProto_ReservedRange.toJSON(e) : undefined); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): DescriptorProto { + const message = createBaseDescriptorProto(); + message.name = object.name ?? ""; + message.field = object.field?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.extension = object.extension?.map((e) => FieldDescriptorProto.fromPartial(e)) || []; + message.nestedType = object.nestedType?.map((e) => DescriptorProto.fromPartial(e)) || []; + message.enumType = object.enumType?.map((e) => EnumDescriptorProto.fromPartial(e)) || []; + message.extensionRange = object.extensionRange?.map((e) => DescriptorProto_ExtensionRange.fromPartial(e)) || []; + message.oneofDecl = object.oneofDecl?.map((e) => OneofDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
MessageOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => DescriptorProto_ReservedRange.fromPartial(e)) || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseDescriptorProto_ExtensionRange(): DescriptorProto_ExtensionRange { + return { start: 0, end: 0, options: undefined }; +} + +export const DescriptorProto_ExtensionRange = { + encode(message: DescriptorProto_ExtensionRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + if (message.options !== undefined) { + ExtensionRangeOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ExtensionRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ExtensionRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + case 3: + message.options = ExtensionRangeOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ExtensionRange { + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + options: isSet(object.options) ? 
ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: DescriptorProto_ExtensionRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + message.options !== undefined + && (obj.options = message.options ? ExtensionRangeOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ExtensionRange { + const message = createBaseDescriptorProto_ExtensionRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? ExtensionRangeOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseDescriptorProto_ReservedRange(): DescriptorProto_ReservedRange { + return { start: 0, end: 0 }; +} + +export const DescriptorProto_ReservedRange = { + encode(message: DescriptorProto_ReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): DescriptorProto_ReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): DescriptorProto_ReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? 
Number(object.end) : 0 }; + }, + + toJSON(message: DescriptorProto_ReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): DescriptorProto_ReservedRange { + const message = createBaseDescriptorProto_ReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 0; + return message; + }, +}; + +function createBaseExtensionRangeOptions(): ExtensionRangeOptions { + return { uninterpretedOption: [] }; +} + +export const ExtensionRangeOptions = { + encode(message: ExtensionRangeOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ExtensionRangeOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseExtensionRangeOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ExtensionRangeOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ExtensionRangeOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: "", + number: 0, + label: 1, + type: 1, + typeName: "", + extendee: "", + defaultValue: "", + oneofIndex: 0, + jsonName: "", + options: undefined, + proto3Optional: false, + }; +} + +export const FieldDescriptorProto = { + encode(message: FieldDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(24).int32(message.number); + } + if (message.label !== 1) { + writer.uint32(32).int32(message.label); + } + if (message.type !== 1) { + writer.uint32(40).int32(message.type); + } + if (message.typeName !== "") { + writer.uint32(50).string(message.typeName); + } + if (message.extendee !== "") { + writer.uint32(18).string(message.extendee); + } + if (message.defaultValue !== "") { + writer.uint32(58).string(message.defaultValue); + } + if (message.oneofIndex !== 0) { + writer.uint32(72).int32(message.oneofIndex); + } + if (message.jsonName !== "") { + writer.uint32(82).string(message.jsonName); + } + if (message.options !== undefined) { + FieldOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.proto3Optional === true) { + writer.uint32(136).bool(message.proto3Optional); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 3: + message.number = reader.int32(); + break; + case 4: + message.label = reader.int32() as any; + break; + case 5: + message.type = reader.int32() as any; + break; + case 6: + message.typeName = reader.string(); + break; + case 2: + message.extendee = reader.string(); + break; + case 7: + message.defaultValue = reader.string(); + break; + case 9: + message.oneofIndex = reader.int32(); + break; + case 10: + message.jsonName = reader.string(); + break; + case 8: + message.options = FieldOptions.decode(reader, reader.uint32()); + break; + case 17: + message.proto3Optional = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) ? String(object.typeName) : "", + extendee: isSet(object.extendee) ? String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) ? String(object.defaultValue) : "", + oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? String(object.jsonName) : "", + options: isSet(object.options) ? FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) ? 
Boolean(object.proto3Optional) : false, + }; + }, + + toJSON(message: FieldDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.label !== undefined && (obj.label = fieldDescriptorProto_LabelToJSON(message.label)); + message.type !== undefined && (obj.type = fieldDescriptorProto_TypeToJSON(message.type)); + message.typeName !== undefined && (obj.typeName = message.typeName); + message.extendee !== undefined && (obj.extendee = message.extendee); + message.defaultValue !== undefined && (obj.defaultValue = message.defaultValue); + message.oneofIndex !== undefined && (obj.oneofIndex = Math.round(message.oneofIndex)); + message.jsonName !== undefined && (obj.jsonName = message.jsonName); + message.options !== undefined && (obj.options = message.options ? FieldOptions.toJSON(message.options) : undefined); + message.proto3Optional !== undefined && (obj.proto3Optional = message.proto3Optional); + return obj; + }, + + fromPartial, I>>(object: I): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.label = object.label ?? 1; + message.type = object.type ?? 1; + message.typeName = object.typeName ?? ""; + message.extendee = object.extendee ?? ""; + message.defaultValue = object.defaultValue ?? ""; + message.oneofIndex = object.oneofIndex ?? 0; + message.jsonName = object.jsonName ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? FieldOptions.fromPartial(object.options) + : undefined; + message.proto3Optional = object.proto3Optional ?? 
false; + return message; + }, +}; + +function createBaseOneofDescriptorProto(): OneofDescriptorProto { + return { name: "", options: undefined }; +} + +export const OneofDescriptorProto = { + encode(message: OneofDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.options !== undefined) { + OneofOptions.encode(message.options, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.options = OneofOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + options: isSet(object.options) ? OneofOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: OneofDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.options !== undefined && (obj.options = message.options ? OneofOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto(); + message.name = object.name ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? 
OneofOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseEnumDescriptorProto(): EnumDescriptorProto { + return { name: "", value: [], options: undefined, reservedRange: [], reservedName: [] }; +} + +export const EnumDescriptorProto = { + encode(message: EnumDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.value) { + EnumValueDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + EnumOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.reservedRange) { + EnumDescriptorProto_EnumReservedRange.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(42).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.value.push(EnumValueDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = EnumOptions.decode(reader, reader.uint32()); + break; + case 4: + message.reservedRange.push(EnumDescriptorProto_EnumReservedRange.decode(reader, reader.uint32())); + break; + case 5: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + value: Array.isArray(object?.value) ? 
object.value.map((e: any) => EnumValueDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? EnumOptions.fromJSON(object.options) : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => String(e)) : [], + }; + }, + + toJSON(message: EnumDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.value) { + obj.value = message.value.map((e) => e ? EnumValueDescriptorProto.toJSON(e) : undefined); + } else { + obj.value = []; + } + message.options !== undefined && (obj.options = message.options ? EnumOptions.toJSON(message.options) : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map((e) => + e ? EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined + ); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map((e) => e); + } else { + obj.reservedName = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto(); + message.name = object.name ?? ""; + message.value = object.value?.map((e) => EnumValueDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? 
EnumOptions.fromPartial(object.options) + : undefined; + message.reservedRange = object.reservedRange?.map((e) => EnumDescriptorProto_EnumReservedRange.fromPartial(e)) + || []; + message.reservedName = object.reservedName?.map((e) => e) || []; + return message; + }, +}; + +function createBaseEnumDescriptorProto_EnumReservedRange(): EnumDescriptorProto_EnumReservedRange { + return { start: 0, end: 0 }; +} + +export const EnumDescriptorProto_EnumReservedRange = { + encode(message: EnumDescriptorProto_EnumReservedRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumDescriptorProto_EnumReservedRange { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumDescriptorProto_EnumReservedRange { + return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + }, + + toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>( + object: I, + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +function createBaseEnumValueDescriptorProto(): EnumValueDescriptorProto { + return { name: "", number: 0, options: undefined }; +} + +export const EnumValueDescriptorProto = { + encode(message: EnumValueDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(16).int32(message.number); + } + if (message.options !== undefined) { + EnumValueOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.number = reader.int32(); + break; + case 3: + message.options = EnumValueOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + number: isSet(object.number) ? Number(object.number) : 0, + options: isSet(object.options) ? EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: EnumValueDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.options !== undefined + && (obj.options = message.options ? 
EnumValueOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto(); + message.name = object.name ?? ""; + message.number = object.number ?? 0; + message.options = (object.options !== undefined && object.options !== null) + ? EnumValueOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseServiceDescriptorProto(): ServiceDescriptorProto { + return { name: "", method: [], options: undefined }; +} + +export const ServiceDescriptorProto = { + encode(message: ServiceDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + for (const v of message.method) { + MethodDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + ServiceOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceDescriptorProto { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.method.push(MethodDescriptorProto.decode(reader, reader.uint32())); + break; + case 3: + message.options = ServiceOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + method: Array.isArray(object?.method) ? object.method.map((e: any) => MethodDescriptorProto.fromJSON(e)) : [], + options: isSet(object.options) ? 
ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + + toJSON(message: ServiceDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.method) { + obj.method = message.method.map((e) => e ? MethodDescriptorProto.toJSON(e) : undefined); + } else { + obj.method = []; + } + message.options !== undefined + && (obj.options = message.options ? ServiceOptions.toJSON(message.options) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto(); + message.name = object.name ?? ""; + message.method = object.method?.map((e) => MethodDescriptorProto.fromPartial(e)) || []; + message.options = (object.options !== undefined && object.options !== null) + ? ServiceOptions.fromPartial(object.options) + : undefined; + return message; + }, +}; + +function createBaseMethodDescriptorProto(): MethodDescriptorProto { + return { + name: "", + inputType: "", + outputType: "", + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} + +export const MethodDescriptorProto = { + encode(message: MethodDescriptorProto, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.inputType !== "") { + writer.uint32(18).string(message.inputType); + } + if (message.outputType !== "") { + writer.uint32(26).string(message.outputType); + } + if (message.options !== undefined) { + MethodOptions.encode(message.options, writer.uint32(34).fork()).ldelim(); + } + if (message.clientStreaming === true) { + writer.uint32(40).bool(message.clientStreaming); + } + if (message.serverStreaming === true) { + writer.uint32(48).bool(message.serverStreaming); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodDescriptorProto { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.inputType = reader.string(); + break; + case 3: + message.outputType = reader.string(); + break; + case 4: + message.options = MethodOptions.decode(reader, reader.uint32()); + break; + case 5: + message.clientStreaming = reader.bool(); + break; + case 6: + message.serverStreaming = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : "", + inputType: isSet(object.inputType) ? String(object.inputType) : "", + outputType: isSet(object.outputType) ? String(object.outputType) : "", + options: isSet(object.options) ? MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) ? Boolean(object.clientStreaming) : false, + serverStreaming: isSet(object.serverStreaming) ? Boolean(object.serverStreaming) : false, + }; + }, + + toJSON(message: MethodDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.inputType !== undefined && (obj.inputType = message.inputType); + message.outputType !== undefined && (obj.outputType = message.outputType); + message.options !== undefined + && (obj.options = message.options ? MethodOptions.toJSON(message.options) : undefined); + message.clientStreaming !== undefined && (obj.clientStreaming = message.clientStreaming); + message.serverStreaming !== undefined && (obj.serverStreaming = message.serverStreaming); + return obj; + }, + + fromPartial, I>>(object: I): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto(); + message.name = object.name ?? 
""; + message.inputType = object.inputType ?? ""; + message.outputType = object.outputType ?? ""; + message.options = (object.options !== undefined && object.options !== null) + ? MethodOptions.fromPartial(object.options) + : undefined; + message.clientStreaming = object.clientStreaming ?? false; + message.serverStreaming = object.serverStreaming ?? false; + return message; + }, +}; + +function createBaseFileOptions(): FileOptions { + return { + javaPackage: "", + javaOuterClassname: "", + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: "", + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: "", + csharpNamespace: "", + swiftPrefix: "", + phpClassPrefix: "", + phpNamespace: "", + phpMetadataNamespace: "", + rubyPackage: "", + uninterpretedOption: [], + }; +} + +export const FileOptions = { + encode(message: FileOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.javaPackage !== "") { + writer.uint32(10).string(message.javaPackage); + } + if (message.javaOuterClassname !== "") { + writer.uint32(66).string(message.javaOuterClassname); + } + if (message.javaMultipleFiles === true) { + writer.uint32(80).bool(message.javaMultipleFiles); + } + if (message.javaGenerateEqualsAndHash === true) { + writer.uint32(160).bool(message.javaGenerateEqualsAndHash); + } + if (message.javaStringCheckUtf8 === true) { + writer.uint32(216).bool(message.javaStringCheckUtf8); + } + if (message.optimizeFor !== 1) { + writer.uint32(72).int32(message.optimizeFor); + } + if (message.goPackage !== "") { + writer.uint32(90).string(message.goPackage); + } + if (message.ccGenericServices === true) { + writer.uint32(128).bool(message.ccGenericServices); + } + if (message.javaGenericServices === true) { + writer.uint32(136).bool(message.javaGenericServices); + } + if 
(message.pyGenericServices === true) { + writer.uint32(144).bool(message.pyGenericServices); + } + if (message.phpGenericServices === true) { + writer.uint32(336).bool(message.phpGenericServices); + } + if (message.deprecated === true) { + writer.uint32(184).bool(message.deprecated); + } + if (message.ccEnableArenas === true) { + writer.uint32(248).bool(message.ccEnableArenas); + } + if (message.objcClassPrefix !== "") { + writer.uint32(290).string(message.objcClassPrefix); + } + if (message.csharpNamespace !== "") { + writer.uint32(298).string(message.csharpNamespace); + } + if (message.swiftPrefix !== "") { + writer.uint32(314).string(message.swiftPrefix); + } + if (message.phpClassPrefix !== "") { + writer.uint32(322).string(message.phpClassPrefix); + } + if (message.phpNamespace !== "") { + writer.uint32(330).string(message.phpNamespace); + } + if (message.phpMetadataNamespace !== "") { + writer.uint32(354).string(message.phpMetadataNamespace); + } + if (message.rubyPackage !== "") { + writer.uint32(362).string(message.rubyPackage); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.javaPackage = reader.string(); + break; + case 8: + message.javaOuterClassname = reader.string(); + break; + case 10: + message.javaMultipleFiles = reader.bool(); + break; + case 20: + message.javaGenerateEqualsAndHash = reader.bool(); + break; + case 27: + message.javaStringCheckUtf8 = reader.bool(); + break; + case 9: + message.optimizeFor = reader.int32() as any; + break; + case 11: + message.goPackage = reader.string(); + break; + case 16: + message.ccGenericServices = reader.bool(); + break; + case 17: + message.javaGenericServices = reader.bool(); + break; + case 18: + message.pyGenericServices = reader.bool(); + break; + case 42: + message.phpGenericServices = reader.bool(); + break; + case 23: + message.deprecated = reader.bool(); + break; + case 31: + message.ccEnableArenas = reader.bool(); + break; + case 36: + message.objcClassPrefix = reader.string(); + break; + case 37: + message.csharpNamespace = reader.string(); + break; + case 39: + message.swiftPrefix = reader.string(); + break; + case 40: + message.phpClassPrefix = reader.string(); + break; + case 41: + message.phpNamespace = reader.string(); + break; + case 44: + message.phpMetadataNamespace = reader.string(); + break; + case 45: + message.rubyPackage = reader.string(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FileOptions { + return { + javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : "", + javaOuterClassname: isSet(object.javaOuterClassname) ? String(object.javaOuterClassname) : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) ? 
Boolean(object.javaMultipleFiles) : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? Boolean(object.javaStringCheckUtf8) : false, + optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, + goPackage: isSet(object.goPackage) ? String(object.goPackage) : "", + ccGenericServices: isSet(object.ccGenericServices) ? Boolean(object.ccGenericServices) : false, + javaGenericServices: isSet(object.javaGenericServices) ? Boolean(object.javaGenericServices) : false, + pyGenericServices: isSet(object.pyGenericServices) ? Boolean(object.pyGenericServices) : false, + phpGenericServices: isSet(object.phpGenericServices) ? Boolean(object.phpGenericServices) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) ? Boolean(object.ccEnableArenas) : false, + objcClassPrefix: isSet(object.objcClassPrefix) ? String(object.objcClassPrefix) : "", + csharpNamespace: isSet(object.csharpNamespace) ? String(object.csharpNamespace) : "", + swiftPrefix: isSet(object.swiftPrefix) ? String(object.swiftPrefix) : "", + phpClassPrefix: isSet(object.phpClassPrefix) ? String(object.phpClassPrefix) : "", + phpNamespace: isSet(object.phpNamespace) ? String(object.phpNamespace) : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? String(object.phpMetadataNamespace) : "", + rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : "", + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FileOptions): unknown { + const obj: any = {}; + message.javaPackage !== undefined && (obj.javaPackage = message.javaPackage); + message.javaOuterClassname !== undefined && (obj.javaOuterClassname = message.javaOuterClassname); + message.javaMultipleFiles !== undefined && (obj.javaMultipleFiles = message.javaMultipleFiles); + message.javaGenerateEqualsAndHash !== undefined + && (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash); + message.javaStringCheckUtf8 !== undefined && (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8); + message.optimizeFor !== undefined && (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor)); + message.goPackage !== undefined && (obj.goPackage = message.goPackage); + message.ccGenericServices !== undefined && (obj.ccGenericServices = message.ccGenericServices); + message.javaGenericServices !== undefined && (obj.javaGenericServices = message.javaGenericServices); + message.pyGenericServices !== undefined && (obj.pyGenericServices = message.pyGenericServices); + message.phpGenericServices !== undefined && (obj.phpGenericServices = message.phpGenericServices); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.ccEnableArenas !== undefined && (obj.ccEnableArenas = message.ccEnableArenas); + message.objcClassPrefix !== undefined && (obj.objcClassPrefix = message.objcClassPrefix); + message.csharpNamespace !== undefined && (obj.csharpNamespace = message.csharpNamespace); + message.swiftPrefix !== undefined && (obj.swiftPrefix = message.swiftPrefix); + message.phpClassPrefix !== undefined && (obj.phpClassPrefix = message.phpClassPrefix); + message.phpNamespace !== undefined && (obj.phpNamespace = message.phpNamespace); + message.phpMetadataNamespace !== undefined && (obj.phpMetadataNamespace = message.phpMetadataNamespace); + message.rubyPackage !== undefined 
&& (obj.rubyPackage = message.rubyPackage); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FileOptions { + const message = createBaseFileOptions(); + message.javaPackage = object.javaPackage ?? ""; + message.javaOuterClassname = object.javaOuterClassname ?? ""; + message.javaMultipleFiles = object.javaMultipleFiles ?? false; + message.javaGenerateEqualsAndHash = object.javaGenerateEqualsAndHash ?? false; + message.javaStringCheckUtf8 = object.javaStringCheckUtf8 ?? false; + message.optimizeFor = object.optimizeFor ?? 1; + message.goPackage = object.goPackage ?? ""; + message.ccGenericServices = object.ccGenericServices ?? false; + message.javaGenericServices = object.javaGenericServices ?? false; + message.pyGenericServices = object.pyGenericServices ?? false; + message.phpGenericServices = object.phpGenericServices ?? false; + message.deprecated = object.deprecated ?? false; + message.ccEnableArenas = object.ccEnableArenas ?? false; + message.objcClassPrefix = object.objcClassPrefix ?? ""; + message.csharpNamespace = object.csharpNamespace ?? ""; + message.swiftPrefix = object.swiftPrefix ?? ""; + message.phpClassPrefix = object.phpClassPrefix ?? ""; + message.phpNamespace = object.phpNamespace ?? ""; + message.phpMetadataNamespace = object.phpMetadataNamespace ?? ""; + message.rubyPackage = object.rubyPackage ?? 
""; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMessageOptions(): MessageOptions { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + uninterpretedOption: [], + }; +} + +export const MessageOptions = { + encode(message: MessageOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.messageSetWireFormat === true) { + writer.uint32(8).bool(message.messageSetWireFormat); + } + if (message.noStandardDescriptorAccessor === true) { + writer.uint32(16).bool(message.noStandardDescriptorAccessor); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.mapEntry === true) { + writer.uint32(56).bool(message.mapEntry); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MessageOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMessageOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.messageSetWireFormat = reader.bool(); + break; + case 2: + message.noStandardDescriptorAccessor = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 7: + message.mapEntry = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MessageOptions { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) ? 
Boolean(object.messageSetWireFormat) : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MessageOptions): unknown { + const obj: any = {}; + message.messageSetWireFormat !== undefined && (obj.messageSetWireFormat = message.messageSetWireFormat); + message.noStandardDescriptorAccessor !== undefined + && (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MessageOptions { + const message = createBaseMessageOptions(); + message.messageSetWireFormat = object.messageSetWireFormat ?? false; + message.noStandardDescriptorAccessor = object.noStandardDescriptorAccessor ?? false; + message.deprecated = object.deprecated ?? false; + message.mapEntry = object.mapEntry ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseFieldOptions(): FieldOptions { + return { ctype: 0, packed: false, jstype: 0, lazy: false, deprecated: false, weak: false, uninterpretedOption: [] }; +} + +export const FieldOptions = { + encode(message: FieldOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.ctype !== 0) { + writer.uint32(8).int32(message.ctype); + } + if (message.packed === true) { + writer.uint32(16).bool(message.packed); + } + if (message.jstype !== 0) { + writer.uint32(48).int32(message.jstype); + } + if (message.lazy === true) { + writer.uint32(40).bool(message.lazy); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.weak === true) { + writer.uint32(80).bool(message.weak); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FieldOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.ctype = reader.int32() as any; + break; + case 2: + message.packed = reader.bool(); + break; + case 6: + message.jstype = reader.int32() as any; + break; + case 5: + message.lazy = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 10: + message.weak = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): FieldOptions { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? Boolean(object.lazy) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? Boolean(object.weak) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: FieldOptions): unknown { + const obj: any = {}; + message.ctype !== undefined && (obj.ctype = fieldOptions_CTypeToJSON(message.ctype)); + message.packed !== undefined && (obj.packed = message.packed); + message.jstype !== undefined && (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype)); + message.lazy !== undefined && (obj.lazy = message.lazy); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.weak !== undefined && (obj.weak = message.weak); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): FieldOptions { + const message = createBaseFieldOptions(); + message.ctype = object.ctype ?? 0; + message.packed = object.packed ?? false; + message.jstype = object.jstype ?? 0; + message.lazy = object.lazy ?? false; + message.deprecated = object.deprecated ?? false; + message.weak = object.weak ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseOneofOptions(): OneofOptions { + return { uninterpretedOption: [] }; +} + +export const OneofOptions = { + encode(message: OneofOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): OneofOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): OneofOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: OneofOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? 
UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): OneofOptions { + const message = createBaseOneofOptions(); + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumOptions(): EnumOptions { + return { allowAlias: false, deprecated: false, uninterpretedOption: [] }; +} + +export const EnumOptions = { + encode(message: EnumOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.allowAlias === true) { + writer.uint32(16).bool(message.allowAlias); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.allowAlias = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumOptions { + return { + allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumOptions): unknown { + const obj: any = {}; + message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumOptions { + const message = createBaseEnumOptions(); + message.allowAlias = object.allowAlias ?? false; + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEnumValueOptions(): EnumValueOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const EnumValueOptions = { + encode(message: EnumValueOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(8).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): EnumValueOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumValueOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): EnumValueOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: EnumValueOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): EnumValueOptions { + const message = createBaseEnumValueOptions(); + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseServiceOptions(): ServiceOptions { + return { deprecated: false, uninterpretedOption: [] }; +} + +export const ServiceOptions = { + encode(message: ServiceOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ServiceOptions { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): ServiceOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: ServiceOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): ServiceOptions { + const message = createBaseServiceOptions(); + message.deprecated = object.deprecated ?? 
false; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseMethodOptions(): MethodOptions { + return { deprecated: false, idempotencyLevel: 0, uninterpretedOption: [] }; +} + +export const MethodOptions = { + encode(message: MethodOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + if (message.idempotencyLevel !== 0) { + writer.uint32(272).int32(message.idempotencyLevel); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MethodOptions { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 34: + message.idempotencyLevel = reader.int32() as any; + break; + case 999: + message.uninterpretedOption.push(UninterpretedOption.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MethodOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : 0, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) + : [], + }; + }, + + toJSON(message: MethodOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.idempotencyLevel !== undefined + && (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel)); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => e ? UninterpretedOption.toJSON(e) : undefined); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + + fromPartial, I>>(object: I): MethodOptions { + const message = createBaseMethodOptions(); + message.deprecated = object.deprecated ?? false; + message.idempotencyLevel = object.idempotencyLevel ?? 0; + message.uninterpretedOption = object.uninterpretedOption?.map((e) => UninterpretedOption.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseUninterpretedOption(): UninterpretedOption { + return { + name: [], + identifierValue: "", + positiveIntValue: 0, + negativeIntValue: 0, + doubleValue: 0, + stringValue: new Uint8Array(), + aggregateValue: "", + }; +} + +export const UninterpretedOption = { + encode(message: UninterpretedOption, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.name) { + UninterpretedOption_NamePart.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.identifierValue !== "") { + writer.uint32(26).string(message.identifierValue); + } + if (message.positiveIntValue !== 0) { + writer.uint32(32).uint64(message.positiveIntValue); + } + if (message.negativeIntValue !== 0) { + writer.uint32(40).int64(message.negativeIntValue); + } + if (message.doubleValue !== 0) { + writer.uint32(49).double(message.doubleValue); + } + if (message.stringValue.length !== 0) { + writer.uint32(58).bytes(message.stringValue); + } + if (message.aggregateValue !== "") { + writer.uint32(66).string(message.aggregateValue); + } + 
return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.name.push(UninterpretedOption_NamePart.decode(reader, reader.uint32())); + break; + case 3: + message.identifierValue = reader.string(); + break; + case 4: + message.positiveIntValue = longToNumber(reader.uint64() as Long); + break; + case 5: + message.negativeIntValue = longToNumber(reader.int64() as Long); + break; + case 6: + message.doubleValue = reader.double(); + break; + case 7: + message.stringValue = reader.bytes(); + break; + case 8: + message.aggregateValue = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption { + return { + name: Array.isArray(object?.name) ? object.name.map((e: any) => UninterpretedOption_NamePart.fromJSON(e)) : [], + identifierValue: isSet(object.identifierValue) ? String(object.identifierValue) : "", + positiveIntValue: isSet(object.positiveIntValue) ? Number(object.positiveIntValue) : 0, + negativeIntValue: isSet(object.negativeIntValue) ? Number(object.negativeIntValue) : 0, + doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) ? bytesFromBase64(object.stringValue) : new Uint8Array(), + aggregateValue: isSet(object.aggregateValue) ? String(object.aggregateValue) : "", + }; + }, + + toJSON(message: UninterpretedOption): unknown { + const obj: any = {}; + if (message.name) { + obj.name = message.name.map((e) => e ? 
UninterpretedOption_NamePart.toJSON(e) : undefined); + } else { + obj.name = []; + } + message.identifierValue !== undefined && (obj.identifierValue = message.identifierValue); + message.positiveIntValue !== undefined && (obj.positiveIntValue = Math.round(message.positiveIntValue)); + message.negativeIntValue !== undefined && (obj.negativeIntValue = Math.round(message.negativeIntValue)); + message.doubleValue !== undefined && (obj.doubleValue = message.doubleValue); + message.stringValue !== undefined + && (obj.stringValue = base64FromBytes( + message.stringValue !== undefined ? message.stringValue : new Uint8Array(), + )); + message.aggregateValue !== undefined && (obj.aggregateValue = message.aggregateValue); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption { + const message = createBaseUninterpretedOption(); + message.name = object.name?.map((e) => UninterpretedOption_NamePart.fromPartial(e)) || []; + message.identifierValue = object.identifierValue ?? ""; + message.positiveIntValue = object.positiveIntValue ?? 0; + message.negativeIntValue = object.negativeIntValue ?? 0; + message.doubleValue = object.doubleValue ?? 0; + message.stringValue = object.stringValue ?? new Uint8Array(); + message.aggregateValue = object.aggregateValue ?? ""; + return message; + }, +}; + +function createBaseUninterpretedOption_NamePart(): UninterpretedOption_NamePart { + return { namePart: "", isExtension: false }; +} + +export const UninterpretedOption_NamePart = { + encode(message: UninterpretedOption_NamePart, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.namePart !== "") { + writer.uint32(10).string(message.namePart); + } + if (message.isExtension === true) { + writer.uint32(16).bool(message.isExtension); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): UninterpretedOption_NamePart { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseUninterpretedOption_NamePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.namePart = reader.string(); + break; + case 2: + message.isExtension = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): UninterpretedOption_NamePart { + return { + namePart: isSet(object.namePart) ? String(object.namePart) : "", + isExtension: isSet(object.isExtension) ? Boolean(object.isExtension) : false, + }; + }, + + toJSON(message: UninterpretedOption_NamePart): unknown { + const obj: any = {}; + message.namePart !== undefined && (obj.namePart = message.namePart); + message.isExtension !== undefined && (obj.isExtension = message.isExtension); + return obj; + }, + + fromPartial, I>>(object: I): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart(); + message.namePart = object.namePart ?? ""; + message.isExtension = object.isExtension ?? false; + return message; + }, +}; + +function createBaseSourceCodeInfo(): SourceCodeInfo { + return { location: [] }; +} + +export const SourceCodeInfo = { + encode(message: SourceCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.location) { + SourceCodeInfo_Location.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.location.push(SourceCodeInfo_Location.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo { + return { + location: Array.isArray(object?.location) + ? object.location.map((e: any) => SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo): unknown { + const obj: any = {}; + if (message.location) { + obj.location = message.location.map((e) => e ? SourceCodeInfo_Location.toJSON(e) : undefined); + } else { + obj.location = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo { + const message = createBaseSourceCodeInfo(); + message.location = object.location?.map((e) => SourceCodeInfo_Location.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseSourceCodeInfo_Location(): SourceCodeInfo_Location { + return { path: [], span: [], leadingComments: "", trailingComments: "", leadingDetachedComments: [] }; +} + +export const SourceCodeInfo_Location = { + encode(message: SourceCodeInfo_Location, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(18).fork(); + for (const v of message.span) { + writer.int32(v); + } + writer.ldelim(); + if (message.leadingComments !== "") { + writer.uint32(26).string(message.leadingComments); + } + if (message.trailingComments !== "") { + writer.uint32(34).string(message.trailingComments); + } + for (const v of message.leadingDetachedComments) { + writer.uint32(50).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): SourceCodeInfo_Location { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo_Location(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.span.push(reader.int32()); + } + } else { + message.span.push(reader.int32()); + } + break; + case 3: + message.leadingComments = reader.string(); + break; + case 4: + message.trailingComments = reader.string(); + break; + case 6: + message.leadingDetachedComments.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): SourceCodeInfo_Location { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + span: Array.isArray(object?.span) ? object.span.map((e: any) => Number(e)) : [], + leadingComments: isSet(object.leadingComments) ? String(object.leadingComments) : "", + trailingComments: isSet(object.trailingComments) ? String(object.trailingComments) : "", + leadingDetachedComments: Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e: any) => String(e)) + : [], + }; + }, + + toJSON(message: SourceCodeInfo_Location): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + if (message.span) { + obj.span = message.span.map((e) => Math.round(e)); + } else { + obj.span = []; + } + message.leadingComments !== undefined && (obj.leadingComments = message.leadingComments); + message.trailingComments !== undefined && (obj.trailingComments = message.trailingComments); + if (message.leadingDetachedComments) { + obj.leadingDetachedComments = message.leadingDetachedComments.map((e) => e); + } else { + obj.leadingDetachedComments = []; + } + return obj; + }, + + fromPartial, I>>(object: I): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location(); + message.path = object.path?.map((e) => e) || []; + message.span = object.span?.map((e) => e) || []; + message.leadingComments = object.leadingComments ?? ""; + message.trailingComments = object.trailingComments ?? ""; + message.leadingDetachedComments = object.leadingDetachedComments?.map((e) => e) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo(): GeneratedCodeInfo { + return { annotation: [] }; +} + +export const GeneratedCodeInfo = { + encode(message: GeneratedCodeInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.annotation) { + GeneratedCodeInfo_Annotation.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.annotation.push(GeneratedCodeInfo_Annotation.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo { + return { + annotation: Array.isArray(object?.annotation) + ? object.annotation.map((e: any) => GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GeneratedCodeInfo): unknown { + const obj: any = {}; + if (message.annotation) { + obj.annotation = message.annotation.map((e) => e ? GeneratedCodeInfo_Annotation.toJSON(e) : undefined); + } else { + obj.annotation = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo(); + message.annotation = object.annotation?.map((e) => GeneratedCodeInfo_Annotation.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseGeneratedCodeInfo_Annotation(): GeneratedCodeInfo_Annotation { + return { path: [], sourceFile: "", begin: 0, end: 0 }; +} + +export const GeneratedCodeInfo_Annotation = { + encode(message: GeneratedCodeInfo_Annotation, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + if (message.sourceFile !== "") { + writer.uint32(18).string(message.sourceFile); + } + if (message.begin !== 0) { + writer.uint32(24).int32(message.begin); + } + if (message.end !== 0) { + writer.uint32(32).int32(message.end); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GeneratedCodeInfo_Annotation { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo_Annotation(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + message.sourceFile = reader.string(); + break; + case 3: + message.begin = reader.int32(); + break; + case 4: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GeneratedCodeInfo_Annotation { + return { + path: Array.isArray(object?.path) ? object.path.map((e: any) => Number(e)) : [], + sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : "", + begin: isSet(object.begin) ? Number(object.begin) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + }; + }, + + toJSON(message: GeneratedCodeInfo_Annotation): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map((e) => Math.round(e)); + } else { + obj.path = []; + } + message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile); + message.begin !== undefined && (obj.begin = Math.round(message.begin)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + + fromPartial, I>>(object: I): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation(); + message.path = object.path?.map((e) => e) || []; + message.sourceFile = object.sourceFile ?? ""; + message.begin = object.begin ?? 0; + message.end = object.end ?? 
0; + return message; + }, +}; + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== "undefined") { + return globalThis; + } + if (typeof self !== "undefined") { + return self; + } + if (typeof window !== "undefined") { + return window; + } + if (typeof global !== "undefined") { + return global; + } + throw "Unable to locate global object"; +})(); + +function bytesFromBase64(b64: string): Uint8Array { + if (globalThis.Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if (globalThis.Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function longToNumber(long: Long): number { + if (long.gt(Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/zigbeealliance/distributedcomplianceledger/vendorinfo/genesis.ts b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/zigbeealliance/distributedcomplianceledger/vendorinfo/genesis.ts new file mode 100644 index 000000000..f40ba1ee6 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/zigbeealliance/distributedcomplianceledger/vendorinfo/genesis.ts @@ -0,0 +1,77 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { VendorInfo } from "./vendor_info"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.vendorinfo"; + +/** GenesisState defines the vendorinfo module's genesis state. */ +export interface GenesisState { + /** this line is used by starport scaffolding # genesis/proto/state */ + vendorInfoList: VendorInfo[]; +} + +function createBaseGenesisState(): GenesisState { + return { vendorInfoList: [] }; +} + +export const GenesisState = { + encode(message: GenesisState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.vendorInfoList) { + VendorInfo.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GenesisState { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGenesisState(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vendorInfoList.push(VendorInfo.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): GenesisState { + return { + vendorInfoList: Array.isArray(object?.vendorInfoList) + ? object.vendorInfoList.map((e: any) => VendorInfo.fromJSON(e)) + : [], + }; + }, + + toJSON(message: GenesisState): unknown { + const obj: any = {}; + if (message.vendorInfoList) { + obj.vendorInfoList = message.vendorInfoList.map((e) => e ? VendorInfo.toJSON(e) : undefined); + } else { + obj.vendorInfoList = []; + } + return obj; + }, + + fromPartial, I>>(object: I): GenesisState { + const message = createBaseGenesisState(); + message.vendorInfoList = object.vendorInfoList?.map((e) => VendorInfo.fromPartial(e)) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/zigbeealliance/distributedcomplianceledger/vendorinfo/query.ts b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/zigbeealliance/distributedcomplianceledger/vendorinfo/query.ts new file mode 100644 index 000000000..f03084335 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/zigbeealliance/distributedcomplianceledger/vendorinfo/query.ts @@ -0,0 +1,286 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; +import { PageRequest, PageResponse } from "../../../cosmos/base/query/v1beta1/pagination"; +import { VendorInfo } from "./vendor_info"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.vendorinfo"; + +export interface QueryGetVendorInfoRequest { + vendorID: number; +} + +export interface QueryGetVendorInfoResponse { + vendorInfo: VendorInfo | undefined; +} + +export interface QueryAllVendorInfoRequest { + pagination: PageRequest | undefined; +} + +export interface QueryAllVendorInfoResponse { + vendorInfo: VendorInfo[]; + pagination: PageResponse | undefined; +} + +function createBaseQueryGetVendorInfoRequest(): QueryGetVendorInfoRequest { + return { vendorID: 0 }; +} + +export const QueryGetVendorInfoRequest = { + encode(message: QueryGetVendorInfoRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vendorID !== 0) { + writer.uint32(8).int32(message.vendorID); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetVendorInfoRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryGetVendorInfoRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vendorID = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetVendorInfoRequest { + return { vendorID: isSet(object.vendorID) ? Number(object.vendorID) : 0 }; + }, + + toJSON(message: QueryGetVendorInfoRequest): unknown { + const obj: any = {}; + message.vendorID !== undefined && (obj.vendorID = Math.round(message.vendorID)); + return obj; + }, + + fromPartial, I>>(object: I): QueryGetVendorInfoRequest { + const message = createBaseQueryGetVendorInfoRequest(); + message.vendorID = object.vendorID ?? 0; + return message; + }, +}; + +function createBaseQueryGetVendorInfoResponse(): QueryGetVendorInfoResponse { + return { vendorInfo: undefined }; +} + +export const QueryGetVendorInfoResponse = { + encode(message: QueryGetVendorInfoResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vendorInfo !== undefined) { + VendorInfo.encode(message.vendorInfo, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryGetVendorInfoResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryGetVendorInfoResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vendorInfo = VendorInfo.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryGetVendorInfoResponse { + return { vendorInfo: isSet(object.vendorInfo) ? 
VendorInfo.fromJSON(object.vendorInfo) : undefined }; + }, + + toJSON(message: QueryGetVendorInfoResponse): unknown { + const obj: any = {}; + message.vendorInfo !== undefined + && (obj.vendorInfo = message.vendorInfo ? VendorInfo.toJSON(message.vendorInfo) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryGetVendorInfoResponse { + const message = createBaseQueryGetVendorInfoResponse(); + message.vendorInfo = (object.vendorInfo !== undefined && object.vendorInfo !== null) + ? VendorInfo.fromPartial(object.vendorInfo) + : undefined; + return message; + }, +}; + +function createBaseQueryAllVendorInfoRequest(): QueryAllVendorInfoRequest { + return { pagination: undefined }; +} + +export const QueryAllVendorInfoRequest = { + encode(message: QueryAllVendorInfoRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllVendorInfoRequest { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllVendorInfoRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllVendorInfoRequest { + return { pagination: isSet(object.pagination) ? PageRequest.fromJSON(object.pagination) : undefined }; + }, + + toJSON(message: QueryAllVendorInfoRequest): unknown { + const obj: any = {}; + message.pagination !== undefined + && (obj.pagination = message.pagination ? 
PageRequest.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryAllVendorInfoRequest { + const message = createBaseQueryAllVendorInfoRequest(); + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +function createBaseQueryAllVendorInfoResponse(): QueryAllVendorInfoResponse { + return { vendorInfo: [], pagination: undefined }; +} + +export const QueryAllVendorInfoResponse = { + encode(message: QueryAllVendorInfoResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + for (const v of message.vendorInfo) { + VendorInfo.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.pagination !== undefined) { + PageResponse.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): QueryAllVendorInfoResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryAllVendorInfoResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vendorInfo.push(VendorInfo.decode(reader, reader.uint32())); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): QueryAllVendorInfoResponse { + return { + vendorInfo: Array.isArray(object?.vendorInfo) ? object.vendorInfo.map((e: any) => VendorInfo.fromJSON(e)) : [], + pagination: isSet(object.pagination) ? PageResponse.fromJSON(object.pagination) : undefined, + }; + }, + + toJSON(message: QueryAllVendorInfoResponse): unknown { + const obj: any = {}; + if (message.vendorInfo) { + obj.vendorInfo = message.vendorInfo.map((e) => e ? 
VendorInfo.toJSON(e) : undefined); + } else { + obj.vendorInfo = []; + } + message.pagination !== undefined + && (obj.pagination = message.pagination ? PageResponse.toJSON(message.pagination) : undefined); + return obj; + }, + + fromPartial, I>>(object: I): QueryAllVendorInfoResponse { + const message = createBaseQueryAllVendorInfoResponse(); + message.vendorInfo = object.vendorInfo?.map((e) => VendorInfo.fromPartial(e)) || []; + message.pagination = (object.pagination !== undefined && object.pagination !== null) + ? PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, +}; + +/** Query defines the gRPC querier service. */ +export interface Query { + /** Queries a vendorInfo by index. */ + VendorInfo(request: QueryGetVendorInfoRequest): Promise; + /** Queries a list of vendorInfo items. */ + VendorInfoAll(request: QueryAllVendorInfoRequest): Promise; +} + +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.VendorInfo = this.VendorInfo.bind(this); + this.VendorInfoAll = this.VendorInfoAll.bind(this); + } + VendorInfo(request: QueryGetVendorInfoRequest): Promise { + const data = QueryGetVendorInfoRequest.encode(request).finish(); + const promise = this.rpc.request("zigbeealliance.distributedcomplianceledger.vendorinfo.Query", "VendorInfo", data); + return promise.then((data) => QueryGetVendorInfoResponse.decode(new _m0.Reader(data))); + } + + VendorInfoAll(request: QueryAllVendorInfoRequest): Promise { + const data = QueryAllVendorInfoRequest.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.vendorinfo.Query", + "VendorInfoAll", + data, + ); + return promise.then((data) => QueryAllVendorInfoResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | 
undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/zigbeealliance/distributedcomplianceledger/vendorinfo/tx.ts b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/zigbeealliance/distributedcomplianceledger/vendorinfo/tx.ts new file mode 100644 index 000000000..2b0f9d243 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/zigbeealliance/distributedcomplianceledger/vendorinfo/tx.ts @@ -0,0 +1,384 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.vendorinfo"; + +export interface MsgCreateVendorInfo { + creator: string; + vendorID: number; + vendorName: string; + companyLegalName: string; + companyPreferredName: string; + vendorLandingPageURL: string; + schemaVersion: number; +} + +export interface MsgCreateVendorInfoResponse { +} + +export interface MsgUpdateVendorInfo { + creator: string; + vendorID: number; + vendorName: string; + companyLegalName: string; + companyPreferredName: string; + vendorLandingPageURL: string; + schemaVersion: number; +} + +export interface MsgUpdateVendorInfoResponse { +} + +function createBaseMsgCreateVendorInfo(): MsgCreateVendorInfo { + return { + creator: "", + vendorID: 0, + vendorName: "", + companyLegalName: "", + companyPreferredName: "", + vendorLandingPageURL: "", + schemaVersion: 0, + }; +} + +export const MsgCreateVendorInfo = { + encode(message: MsgCreateVendorInfo, writer: _m0.Writer = 
_m0.Writer.create()): _m0.Writer { + if (message.creator !== "") { + writer.uint32(10).string(message.creator); + } + if (message.vendorID !== 0) { + writer.uint32(16).int32(message.vendorID); + } + if (message.vendorName !== "") { + writer.uint32(26).string(message.vendorName); + } + if (message.companyLegalName !== "") { + writer.uint32(34).string(message.companyLegalName); + } + if (message.companyPreferredName !== "") { + writer.uint32(42).string(message.companyPreferredName); + } + if (message.vendorLandingPageURL !== "") { + writer.uint32(50).string(message.vendorLandingPageURL); + } + if (message.schemaVersion !== 0) { + writer.uint32(56).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateVendorInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCreateVendorInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.creator = reader.string(); + break; + case 2: + message.vendorID = reader.int32(); + break; + case 3: + message.vendorName = reader.string(); + break; + case 4: + message.companyLegalName = reader.string(); + break; + case 5: + message.companyPreferredName = reader.string(); + break; + case 6: + message.vendorLandingPageURL = reader.string(); + break; + case 7: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgCreateVendorInfo { + return { + creator: isSet(object.creator) ? String(object.creator) : "", + vendorID: isSet(object.vendorID) ? Number(object.vendorID) : 0, + vendorName: isSet(object.vendorName) ? String(object.vendorName) : "", + companyLegalName: isSet(object.companyLegalName) ? 
String(object.companyLegalName) : "", + companyPreferredName: isSet(object.companyPreferredName) ? String(object.companyPreferredName) : "", + vendorLandingPageURL: isSet(object.vendorLandingPageURL) ? String(object.vendorLandingPageURL) : "", + schemaVersion: isSet(object.schemaVersion) ? Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: MsgCreateVendorInfo): unknown { + const obj: any = {}; + message.creator !== undefined && (obj.creator = message.creator); + message.vendorID !== undefined && (obj.vendorID = Math.round(message.vendorID)); + message.vendorName !== undefined && (obj.vendorName = message.vendorName); + message.companyLegalName !== undefined && (obj.companyLegalName = message.companyLegalName); + message.companyPreferredName !== undefined && (obj.companyPreferredName = message.companyPreferredName); + message.vendorLandingPageURL !== undefined && (obj.vendorLandingPageURL = message.vendorLandingPageURL); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): MsgCreateVendorInfo { + const message = createBaseMsgCreateVendorInfo(); + message.creator = object.creator ?? ""; + message.vendorID = object.vendorID ?? 0; + message.vendorName = object.vendorName ?? ""; + message.companyLegalName = object.companyLegalName ?? ""; + message.companyPreferredName = object.companyPreferredName ?? ""; + message.vendorLandingPageURL = object.vendorLandingPageURL ?? ""; + message.schemaVersion = object.schemaVersion ?? 0; + return message; + }, +}; + +function createBaseMsgCreateVendorInfoResponse(): MsgCreateVendorInfoResponse { + return {}; +} + +export const MsgCreateVendorInfoResponse = { + encode(_: MsgCreateVendorInfoResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgCreateVendorInfoResponse { + const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgCreateVendorInfoResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgCreateVendorInfoResponse { + return {}; + }, + + toJSON(_: MsgCreateVendorInfoResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgCreateVendorInfoResponse { + const message = createBaseMsgCreateVendorInfoResponse(); + return message; + }, +}; + +function createBaseMsgUpdateVendorInfo(): MsgUpdateVendorInfo { + return { + creator: "", + vendorID: 0, + vendorName: "", + companyLegalName: "", + companyPreferredName: "", + vendorLandingPageURL: "", + schemaVersion: 0, + }; +} + +export const MsgUpdateVendorInfo = { + encode(message: MsgUpdateVendorInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.creator !== "") { + writer.uint32(10).string(message.creator); + } + if (message.vendorID !== 0) { + writer.uint32(16).int32(message.vendorID); + } + if (message.vendorName !== "") { + writer.uint32(26).string(message.vendorName); + } + if (message.companyLegalName !== "") { + writer.uint32(34).string(message.companyLegalName); + } + if (message.companyPreferredName !== "") { + writer.uint32(42).string(message.companyPreferredName); + } + if (message.vendorLandingPageURL !== "") { + writer.uint32(50).string(message.vendorLandingPageURL); + } + if (message.schemaVersion !== 0) { + writer.uint32(56).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateVendorInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgUpdateVendorInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.creator = reader.string(); + break; + case 2: + message.vendorID = reader.int32(); + break; + case 3: + message.vendorName = reader.string(); + break; + case 4: + message.companyLegalName = reader.string(); + break; + case 5: + message.companyPreferredName = reader.string(); + break; + case 6: + message.vendorLandingPageURL = reader.string(); + break; + case 7: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): MsgUpdateVendorInfo { + return { + creator: isSet(object.creator) ? String(object.creator) : "", + vendorID: isSet(object.vendorID) ? Number(object.vendorID) : 0, + vendorName: isSet(object.vendorName) ? String(object.vendorName) : "", + companyLegalName: isSet(object.companyLegalName) ? String(object.companyLegalName) : "", + companyPreferredName: isSet(object.companyPreferredName) ? String(object.companyPreferredName) : "", + vendorLandingPageURL: isSet(object.vendorLandingPageURL) ? String(object.vendorLandingPageURL) : "", + schemaVersion: isSet(object.schemaVersion) ? 
Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: MsgUpdateVendorInfo): unknown { + const obj: any = {}; + message.creator !== undefined && (obj.creator = message.creator); + message.vendorID !== undefined && (obj.vendorID = Math.round(message.vendorID)); + message.vendorName !== undefined && (obj.vendorName = message.vendorName); + message.companyLegalName !== undefined && (obj.companyLegalName = message.companyLegalName); + message.companyPreferredName !== undefined && (obj.companyPreferredName = message.companyPreferredName); + message.vendorLandingPageURL !== undefined && (obj.vendorLandingPageURL = message.vendorLandingPageURL); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): MsgUpdateVendorInfo { + const message = createBaseMsgUpdateVendorInfo(); + message.creator = object.creator ?? ""; + message.vendorID = object.vendorID ?? 0; + message.vendorName = object.vendorName ?? ""; + message.companyLegalName = object.companyLegalName ?? ""; + message.companyPreferredName = object.companyPreferredName ?? ""; + message.vendorLandingPageURL = object.vendorLandingPageURL ?? ""; + message.schemaVersion = object.schemaVersion ?? 0; + return message; + }, +}; + +function createBaseMsgUpdateVendorInfoResponse(): MsgUpdateVendorInfoResponse { + return {}; +} + +export const MsgUpdateVendorInfoResponse = { + encode(_: MsgUpdateVendorInfoResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): MsgUpdateVendorInfoResponse { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgUpdateVendorInfoResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(_: any): MsgUpdateVendorInfoResponse { + return {}; + }, + + toJSON(_: MsgUpdateVendorInfoResponse): unknown { + const obj: any = {}; + return obj; + }, + + fromPartial, I>>(_: I): MsgUpdateVendorInfoResponse { + const message = createBaseMsgUpdateVendorInfoResponse(); + return message; + }, +}; + +/** Msg defines the Msg service. */ +export interface Msg { + CreateVendorInfo(request: MsgCreateVendorInfo): Promise; + /** this line is used by starport scaffolding # proto/tx/rpc */ + UpdateVendorInfo(request: MsgUpdateVendorInfo): Promise; +} + +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.CreateVendorInfo = this.CreateVendorInfo.bind(this); + this.UpdateVendorInfo = this.UpdateVendorInfo.bind(this); + } + CreateVendorInfo(request: MsgCreateVendorInfo): Promise { + const data = MsgCreateVendorInfo.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.vendorinfo.Msg", + "CreateVendorInfo", + data, + ); + return promise.then((data) => MsgCreateVendorInfoResponse.decode(new _m0.Reader(data))); + } + + UpdateVendorInfo(request: MsgUpdateVendorInfo): Promise { + const data = MsgUpdateVendorInfo.encode(request).finish(); + const promise = this.rpc.request( + "zigbeealliance.distributedcomplianceledger.vendorinfo.Msg", + "UpdateVendorInfo", + data, + ); + return promise.then((data) => MsgUpdateVendorInfoResponse.decode(new _m0.Reader(data))); + } +} + +interface Rpc { + request(service: string, method: string, data: Uint8Array): Promise; +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? 
T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/zigbeealliance/distributedcomplianceledger/vendorinfo/vendor_info.ts b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/zigbeealliance/distributedcomplianceledger/vendorinfo/vendor_info.ts new file mode 100644 index 000000000..9ed042cd4 --- /dev/null +++ b/ts-client/zigbeealliance.distributedcomplianceledger.vendorinfo/types/zigbeealliance/distributedcomplianceledger/vendorinfo/vendor_info.ts @@ -0,0 +1,140 @@ +/* eslint-disable */ +import _m0 from "protobufjs/minimal"; + +export const protobufPackage = "zigbeealliance.distributedcomplianceledger.vendorinfo"; + +export interface VendorInfo { + vendorID: number; + vendorName: string; + companyLegalName: string; + companyPreferredName: string; + vendorLandingPageURL: string; + creator: string; + schemaVersion: number; +} + +function createBaseVendorInfo(): VendorInfo { + return { + vendorID: 0, + vendorName: "", + companyLegalName: "", + companyPreferredName: "", + vendorLandingPageURL: "", + creator: "", + schemaVersion: 0, + }; +} + +export const VendorInfo = { + encode(message: VendorInfo, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.vendorID !== 0) { + writer.uint32(8).int32(message.vendorID); + } + if (message.vendorName !== "") { + writer.uint32(18).string(message.vendorName); + } + if (message.companyLegalName !== "") { + writer.uint32(26).string(message.companyLegalName); + } + if (message.companyPreferredName !== "") { + writer.uint32(34).string(message.companyPreferredName); + } + if 
(message.vendorLandingPageURL !== "") { + writer.uint32(42).string(message.vendorLandingPageURL); + } + if (message.creator !== "") { + writer.uint32(50).string(message.creator); + } + if (message.schemaVersion !== 0) { + writer.uint32(56).uint32(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): VendorInfo { + const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseVendorInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.vendorID = reader.int32(); + break; + case 2: + message.vendorName = reader.string(); + break; + case 3: + message.companyLegalName = reader.string(); + break; + case 4: + message.companyPreferredName = reader.string(); + break; + case 5: + message.vendorLandingPageURL = reader.string(); + break; + case 6: + message.creator = reader.string(); + break; + case 7: + message.schemaVersion = reader.uint32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + + fromJSON(object: any): VendorInfo { + return { + vendorID: isSet(object.vendorID) ? Number(object.vendorID) : 0, + vendorName: isSet(object.vendorName) ? String(object.vendorName) : "", + companyLegalName: isSet(object.companyLegalName) ? String(object.companyLegalName) : "", + companyPreferredName: isSet(object.companyPreferredName) ? String(object.companyPreferredName) : "", + vendorLandingPageURL: isSet(object.vendorLandingPageURL) ? String(object.vendorLandingPageURL) : "", + creator: isSet(object.creator) ? String(object.creator) : "", + schemaVersion: isSet(object.schemaVersion) ? 
Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: VendorInfo): unknown { + const obj: any = {}; + message.vendorID !== undefined && (obj.vendorID = Math.round(message.vendorID)); + message.vendorName !== undefined && (obj.vendorName = message.vendorName); + message.companyLegalName !== undefined && (obj.companyLegalName = message.companyLegalName); + message.companyPreferredName !== undefined && (obj.companyPreferredName = message.companyPreferredName); + message.vendorLandingPageURL !== undefined && (obj.vendorLandingPageURL = message.vendorLandingPageURL); + message.creator !== undefined && (obj.creator = message.creator); + message.schemaVersion !== undefined && (obj.schemaVersion = Math.round(message.schemaVersion)); + return obj; + }, + + fromPartial, I>>(object: I): VendorInfo { + const message = createBaseVendorInfo(); + message.vendorID = object.vendorID ?? 0; + message.vendorName = object.vendorName ?? ""; + message.companyLegalName = object.companyLegalName ?? ""; + message.companyPreferredName = object.companyPreferredName ?? ""; + message.vendorLandingPageURL = object.vendorLandingPageURL ?? ""; + message.creator = object.creator ?? ""; + message.schemaVersion = object.schemaVersion ?? 0; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin ? 
P + : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} diff --git a/types/pki/errors.go b/types/pki/errors.go index 694bf4e52..82b747d13 100644 --- a/types/pki/errors.go +++ b/types/pki/errors.go @@ -3,74 +3,76 @@ package types // DONTCOVER import ( + "cosmossdk.io/errors" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" ) // x/pki module sentinel errors. var ( - ErrProposedCertificateAlreadyExists = sdkerrors.Register(ModuleName, 401, "proposed certificate already exists") - ErrProposedCertificateDoesNotExist = sdkerrors.Register(ModuleName, 402, "proposed certificate does not exist") - ErrCertificateAlreadyExists = sdkerrors.Register(ModuleName, 403, "certificate already exists") - ErrCertificateDoesNotExist = sdkerrors.Register(ModuleName, 404, "certificate does not exist") - ErrProposedCertificateRevocationAlreadyExists = sdkerrors.Register(ModuleName, 405, "proposed certificate revocation already exists") - ErrProposedCertificateRevocationDoesNotExist = sdkerrors.Register(ModuleName, 406, "proposed certificate revocation does not exist") - ErrRevokedCertificateDoesNotExist = sdkerrors.Register(ModuleName, 407, "revoked certificate does not exist") - ErrInappropriateCertificateType = sdkerrors.Register(ModuleName, 408, "inappropriate certificate type") - ErrInvalidCertificate = sdkerrors.Register(ModuleName, 409, "invalid certificate") - ErrInvalidDataDigestType = sdkerrors.Register(ModuleName, 410, "invalid data digest type") - ErrInvalidRevocationType = sdkerrors.Register(ModuleName, 411, "invalid revocation type") - ErrNotEmptyPid = sdkerrors.Register(ModuleName, 412, "pid is not empty") - ErrNotEmptyVid = sdkerrors.Register(ModuleName, 413, "vid is not empty") - ErrRootCertificateIsNotSelfSigned = sdkerrors.Register(ModuleName, 414, "Root certificate is not self-signed") - 
ErrCRLSignerCertificatePidNotEqualRevocationPointPid = sdkerrors.Register(ModuleName, 415, "CRLSignerCertificate pid does not equal revocation point pid") - ErrCRLSignerCertificateVidNotEqualRevocationPointVid = sdkerrors.Register(ModuleName, 416, "CRLSignerCertificate vid does not equal revocation point pid") - ErrCRLSignerCertificatePidNotEqualMsgPid = sdkerrors.Register(ModuleName, 417, "CRLSignerCertificate pid does not equal message pid") - ErrCRLSignerCertificateVidNotEqualMsgVid = sdkerrors.Register(ModuleName, 418, "CRLSignerCertificate vid does not equal message vid") - ErrMessageVidNotEqualAccountVid = sdkerrors.Register(ModuleName, 419, "Message vid does not equal the account vid") - ErrNonRootCertificateSelfSigned = sdkerrors.Register(ModuleName, 420, "Intermediate or leaf certificate must not be self-signed") - ErrEmptyDataFileSize = sdkerrors.Register(ModuleName, 421, "empty data file size") - ErrEmptyDataDigest = sdkerrors.Register(ModuleName, 422, "empty data digest") - ErrEmptyDataDigestType = sdkerrors.Register(ModuleName, 423, "empty data digest type") - ErrNotEmptyDataDigestType = sdkerrors.Register(ModuleName, 424, "not empty data digest type") - ErrDataFieldPresented = sdkerrors.Register(ModuleName, 425, "one or more of DataDigest, DataDigestType, DataFileSize fields presented") - ErrWrongSubjectKeyIDFormat = sdkerrors.Register(ModuleName, 426, "wrong SubjectKeyID format") - ErrVidNotFound = sdkerrors.Register(ModuleName, 427, "vid not found") - ErrPidNotFound = sdkerrors.Register(ModuleName, 428, "pid not found") - ErrPemValuesNotEqual = sdkerrors.Register(ModuleName, 429, "pem values of certificates are not equal") - ErrPkiRevocationDistributionPointAlreadyExists = sdkerrors.Register(ModuleName, 430, "pki revocation distribution point already exists") - ErrPkiRevocationDistributionPointDoesNotExists = sdkerrors.Register(ModuleName, 431, "pki revocaition distribution point does not exist") - ErrUnsupportedOperation = 
sdkerrors.Register(ModuleName, 432, "unsupported operation") - ErrInvalidVidFormat = sdkerrors.Register(ModuleName, 433, "invalid vid format") - ErrInvalidPidFormat = sdkerrors.Register(ModuleName, 434, "invalid pid format") - ErrInvalidDataURLFormat = sdkerrors.Register(ModuleName, 435, "invalid data url format") - ErrCertificateVidNotEqualMsgVid = sdkerrors.Register(ModuleName, 436, "certificate's vid is not equal to the message vid") - ErrMessageVidNotEqualRootCertVid = sdkerrors.Register(ModuleName, 437, "Message vid is not equal to ledger's root certificate vid") - ErrCertNotChainedBack = sdkerrors.Register(ModuleName, 438, "Certificate is not chained back to a root certificate on DCL") - ErrCertVidNotEqualAccountVid = sdkerrors.Register(ModuleName, 439, "account's vid is not equal to certificate vid") - ErrCertVidNotEqualToRootVid = sdkerrors.Register(ModuleName, 440, "certificate's vid is not equal to vid of root certificate ") - ErrCRLSignerCertificateInvalidFormat = sdkerrors.Register(ModuleName, 441, "invalid CRLSignerCertificate certificate") - ErrInvalidAuthorityKeyIDFormat = sdkerrors.Register(ModuleName, 442, "invalid AuthorityKeyID format") + ErrProposedCertificateAlreadyExists = errors.Register(ModuleName, 401, "proposed certificate already exists") + ErrProposedCertificateDoesNotExist = errors.Register(ModuleName, 402, "proposed certificate does not exist") + ErrCertificateAlreadyExists = errors.Register(ModuleName, 403, "certificate already exists") + ErrCertificateDoesNotExist = errors.Register(ModuleName, 404, "certificate does not exist") + ErrProposedCertificateRevocationAlreadyExists = errors.Register(ModuleName, 405, "proposed certificate revocation already exists") + ErrProposedCertificateRevocationDoesNotExist = errors.Register(ModuleName, 406, "proposed certificate revocation does not exist") + ErrRevokedCertificateDoesNotExist = errors.Register(ModuleName, 407, "revoked certificate does not exist") + ErrInappropriateCertificateType = 
errors.Register(ModuleName, 408, "inappropriate certificate type") + ErrInvalidCertificate = errors.Register(ModuleName, 409, "invalid certificate") + ErrInvalidDataDigestType = errors.Register(ModuleName, 410, "invalid data digest type") + ErrInvalidRevocationType = errors.Register(ModuleName, 411, "invalid revocation type") + ErrNotEmptyPid = errors.Register(ModuleName, 412, "pid is not empty") + ErrNotEmptyVid = errors.Register(ModuleName, 413, "vid is not empty") + ErrRootCertificateIsNotSelfSigned = errors.Register(ModuleName, 414, "Root certificate is not self-signed") + ErrCRLSignerCertificatePidNotEqualRevocationPointPid = errors.Register(ModuleName, 415, "CRLSignerCertificate pid does not equal revocation point pid") + ErrCRLSignerCertificateVidNotEqualRevocationPointVid = errors.Register(ModuleName, 416, "CRLSignerCertificate vid does not equal revocation point vid") + ErrCRLSignerCertificatePidNotEqualMsgPid = errors.Register(ModuleName, 417, "CRLSignerCertificate pid does not equal message pid") + ErrCRLSignerCertificateVidNotEqualMsgVid = errors.Register(ModuleName, 418, "CRLSignerCertificate vid does not equal message vid") + ErrMessageVidNotEqualAccountVid = errors.Register(ModuleName, 419, "Message vid does not equal the account vid") + ErrNonRootCertificateSelfSigned = errors.Register(ModuleName, 420, "Intermediate or leaf certificate must not be self-signed") + ErrEmptyDataFileSize = errors.Register(ModuleName, 421, "empty data file size") + ErrEmptyDataDigest = errors.Register(ModuleName, 422, "empty data digest") + ErrEmptyDataDigestType = errors.Register(ModuleName, 423, "empty data digest type") + ErrNotEmptyDataDigestType = errors.Register(ModuleName, 424, "not empty data digest type") + ErrDataFieldPresented = errors.Register(ModuleName, 425, "one or more of DataDigest, DataDigestType, DataFileSize fields presented") + ErrWrongSubjectKeyIDFormat = errors.Register(ModuleName, 426, "wrong SubjectKeyID format") + ErrVidNotFound = 
errors.Register(ModuleName, 427, "vid not found") + ErrPidNotFound = errors.Register(ModuleName, 428, "pid not found") + ErrPemValuesNotEqual = errors.Register(ModuleName, 429, "pem values of certificates are not equal") + ErrPkiRevocationDistributionPointAlreadyExists = errors.Register(ModuleName, 430, "pki revocation distribution point already exists") + ErrPkiRevocationDistributionPointDoesNotExists = errors.Register(ModuleName, 431, "pki revocation distribution point does not exist") + ErrUnsupportedOperation = errors.Register(ModuleName, 432, "unsupported operation") + ErrInvalidVidFormat = errors.Register(ModuleName, 433, "invalid vid format") + ErrInvalidPidFormat = errors.Register(ModuleName, 434, "invalid pid format") + ErrInvalidDataURLFormat = errors.Register(ModuleName, 435, "invalid data url format") + ErrCertificateVidNotEqualMsgVid = errors.Register(ModuleName, 436, "certificate's vid is not equal to the message vid") + ErrMessageVidNotEqualRootCertVid = errors.Register(ModuleName, 437, "Message vid is not equal to ledger's root certificate vid") + ErrCertNotChainedBack = errors.Register(ModuleName, 438, "Certificate is not chained back to a root certificate on DCL") + ErrCertVidNotEqualAccountVid = errors.Register(ModuleName, 439, "account's vid is not equal to certificate vid") + ErrCertVidNotEqualToRootVid = errors.Register(ModuleName, 440, "certificate's vid is not equal to vid of root certificate") + ErrCRLSignerCertificateInvalidFormat = errors.Register(ModuleName, 441, "invalid CRLSignerCertificate certificate") + ErrInvalidAuthorityKeyIDFormat = errors.Register(ModuleName, 442, "invalid AuthorityKeyID format") ) func NewErrUnauthorizedRole(transactionName string, requiredRole types.AccountRole) error { - return sdkerrors.Wrapf(sdkerrors.ErrUnauthorized,
error { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%v)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%v)", err) } func NewErrProposedCertificateAlreadyExists(subject string, subjectKeyID string) error { - return sdkerrors.Wrapf(ErrProposedCertificateAlreadyExists, + return errors.Wrapf(ErrProposedCertificateAlreadyExists, "Proposed X509 root certificate associated with the combination "+ "of subject=%v and subjectKeyID=%v already exists on the ledger", subject, subjectKeyID) } func NewErrProposedCertificateDoesNotExist(subject string, subjectKeyID string) error { - return sdkerrors.Wrapf(ErrProposedCertificateDoesNotExist, + return errors.Wrapf(ErrProposedCertificateDoesNotExist, "No proposed X509 root certificate associated "+ "with the combination of subject=%v and subjectKeyID=%v on the ledger. "+ "The cerificate either does not exists or already approved.", @@ -78,28 +80,28 @@ func NewErrProposedCertificateDoesNotExist(subject string, subjectKeyID string) } func NewErrCertificateAlreadyExists(issuer string, serialNumber string) error { - return sdkerrors.Wrapf(ErrCertificateAlreadyExists, + return errors.Wrapf(ErrCertificateAlreadyExists, "X509 certificate associated with the combination of "+ "issuer=%v and serialNumber=%v already exists on the ledger", issuer, serialNumber) } func NewErrCertificateDoesNotExist(subject string, subjectKeyID string) error { - return sdkerrors.Wrapf(ErrCertificateDoesNotExist, + return errors.Wrapf(ErrCertificateDoesNotExist, "No X509 certificate associated with the "+ "combination of subject=%v and subjectKeyID=%v on the ledger", subject, subjectKeyID) } func NewErrCertificateBySerialNumberDoesNotExist(subject string, subjectKeyID string, serialNumber string) error { - return sdkerrors.Wrapf(ErrCertificateDoesNotExist, + return errors.Wrapf(ErrCertificateDoesNotExist, "No X509 certificate associated with the "+ "combination of subject=%v, subjectKeyID=%v and 
serialNumber=%v on the ledger", subject, subjectKeyID, serialNumber) } func NewErrRootCertificateDoesNotExist(subject string, subjectKeyID string) error { - return sdkerrors.Wrapf(ErrCertificateDoesNotExist, + return errors.Wrapf(ErrCertificateDoesNotExist, "No X509 root certificate associated with the "+ "combination of subject=%s and subjectKeyID=%s on the ledger", subject, subjectKeyID, @@ -107,141 +109,141 @@ func NewErrRootCertificateDoesNotExist(subject string, subjectKeyID string) erro } func NewErrProposedCertificateRevocationAlreadyExists(subject string, subjectKeyID string) error { - return sdkerrors.Wrapf(ErrProposedCertificateRevocationAlreadyExists, + return errors.Wrapf(ErrProposedCertificateRevocationAlreadyExists, "Proposed X509 root certificate revocation associated with the combination "+ "of subject=%v and subjectKeyID=%v already exists on the ledger", subject, subjectKeyID) } func NewErrProposedCertificateRevocationDoesNotExist(subject string, subjectKeyID string) error { - return sdkerrors.Wrapf(ErrProposedCertificateRevocationDoesNotExist, + return errors.Wrapf(ErrProposedCertificateRevocationDoesNotExist, "No proposed X509 root certificate revocation associated "+ "with the combination of subject=%v and subjectKeyID=%v on the ledger.", subject, subjectKeyID) } func NewErrRevokedCertificateDoesNotExist(subject string, subjectKeyID string) error { - return sdkerrors.Wrapf(ErrRevokedCertificateDoesNotExist, + return errors.Wrapf(ErrRevokedCertificateDoesNotExist, "No revoked X509 certificate associated with the "+ "combination of subject=%v and subjectKeyID=%v on the ledger", subject, subjectKeyID) } func NewErrInappropriateCertificateType(e interface{}) error { - return sdkerrors.Wrapf(ErrInappropriateCertificateType, "%v", + return errors.Wrapf(ErrInappropriateCertificateType, "%v", e) } func NewErrInvalidCertificate(e interface{}) error { - return sdkerrors.Wrapf(ErrInvalidCertificate, "%v", + return errors.Wrapf(ErrInvalidCertificate, "%v", 
e) } func NewErrInvalidDataDigestType(dataDigestType uint32, allowedDataDigestTypes []uint32) error { - return sdkerrors.Wrapf(ErrInvalidDataDigestType, + return errors.Wrapf(ErrInvalidDataDigestType, "Invalid DataDigestType: %d. Supported types are: %v", dataDigestType, allowedDataDigestTypes) } func NewErrInvalidRevocationType(revocationType uint32, allowedRevocationTypes []uint32) error { - return sdkerrors.Wrapf(ErrInvalidRevocationType, + return errors.Wrapf(ErrInvalidRevocationType, "Invalid RevocationType: %d. Supported types are: %v", revocationType, allowedRevocationTypes) } func NewErrNotEmptyPidForRootCertificate() error { - return sdkerrors.Wrapf(ErrNotEmptyPid, + return errors.Wrapf(ErrNotEmptyPid, "Product ID (pid) must be empty for root certificates") } func NewErrNotEmptyPidForNonRootCertificate() error { - return sdkerrors.Wrapf(ErrNotEmptyPid, + return errors.Wrapf(ErrNotEmptyPid, "Product ID (pid) must be empty when it is not found in non-root CRL Signer Certificate") } func NewErrNotEmptyVid(e interface{}) error { - return sdkerrors.Wrapf(ErrNotEmptyVid, "%v", + return errors.Wrapf(ErrNotEmptyVid, "%v", e) } func NewErrRootCertificateIsNotSelfSigned() error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrRootCertificateIsNotSelfSigned, "Provided root certificate must be self-signed", ) } func NewErrNonRootCertificateSelfSigned() error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrNonRootCertificateSelfSigned, "Provided non-root certificate must not be self-signed", ) } func NewErrUnauthorizedCertIssuer(subject string, subjectKeyID string) error { - return sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return errors.Wrapf(sdkerrors.ErrUnauthorized, "Issuer and authorityKeyID of new certificate with subject=%v and subjectKeyID=%v "+ "must be the same as ones of existing certificates with the same subject and subjectKeyID", subject, subjectKeyID) } func NewErrUnauthorizedCertOwner(subject string, subjectKeyID string) error { - return 
sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return errors.Wrapf(sdkerrors.ErrUnauthorized, "Only the owner of certificates with subject=%v and subjectKeyID=%v has the authority "+ "to add, remove, or revoke a certificate with the same subject and subjectKeyID", subject, subjectKeyID) } func NewErrUnauthorizedCertVendor(ownerVid int32) error { - return sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return errors.Wrapf(sdkerrors.ErrUnauthorized, "Only the vendor accounts with vid=%d, has the authority "+ "to add, remove, or revoke a certificate with provided subject and subjectKeyID", ownerVid) } func NewErrProvidedNocCertButExistingNotNoc(subject string, subjectKeyID string) error { - return sdkerrors.Wrapf(ErrInappropriateCertificateType, + return errors.Wrapf(ErrInappropriateCertificateType, "The existing certificate with the same combination of subject (%v) and subjectKeyID (%v) is not a NOC certificate", subject, subjectKeyID) } func NewErrRootOfNocCertIsNotNoc(subject string, subjectKeyID string) error { - return sdkerrors.Wrapf(ErrInappropriateCertificateType, + return errors.Wrapf(ErrInappropriateCertificateType, "Root of the provided certificate with subject (%v) and subjectKeyID (%v) is not a NOC certificate", subject, subjectKeyID) } func NewErrProvidedNotNocCertButExistingNoc(subject string, subjectKeyID string) error { - return sdkerrors.Wrapf(ErrInappropriateCertificateType, + return errors.Wrapf(ErrInappropriateCertificateType, "The existing certificate with the same combination of subject (%v) and subjectKeyID (%v) is a NOC certificate", subject, subjectKeyID) } func NewErrProvidedNotNocCertButRootIsNoc() error { - return sdkerrors.Wrapf(ErrInappropriateCertificateType, "The root is NOC certificate, but the provided certificate is not") + return errors.Wrapf(ErrInappropriateCertificateType, "The root is NOC certificate, but the provided certificate is not") } func NewErrRootCertVidNotEqualToCertVid(rootVID int32, certVID int32) error { - return 
sdkerrors.Wrapf(ErrCertVidNotEqualToRootVid, + return errors.Wrapf(ErrCertVidNotEqualToRootVid, "A child certificate must be also VID scoped to the same VID as a root one: "+ "Root certificate's VID = %v, Child certificate's VID = %v", rootVID, certVID) } func NewErrRootCertVidNotEqualToAccountVid(rootVID int32, accountVID int32) error { - return sdkerrors.Wrapf(ErrCertVidNotEqualAccountVid, + return errors.Wrapf(ErrCertVidNotEqualAccountVid, "Only a Vendor associated with VID of root certificate can add a child certificate: "+ "Root certificate's VID = %v, Account VID = %v", rootVID, accountVID) } func NewErrCRLSignerCertificateInvalidFormat(description string) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrCRLSignerCertificateInvalidFormat, "Invalid CRL Signer Certificate format: %v", description, ) } func NewErrCRLSignerCertificatePidNotEqualMsgPid(certificatePid int32, messagePid int32) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrCRLSignerCertificatePidNotEqualMsgPid, "CRL Signer Certificate's pid=%d must be equal to the provided pid=%d in the message", certificatePid, messagePid, @@ -249,7 +251,7 @@ func NewErrCRLSignerCertificatePidNotEqualMsgPid(certificatePid int32, messagePi } func NewErrCRLSignerCertificateVidNotEqualMsgVid(certificateVid int32, messageVid int32) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrCRLSignerCertificateVidNotEqualMsgVid, "CRL Signer Certificate's vid=%d must be equal to the provided vid=%d in the message", certificateVid, messageVid, @@ -257,48 +259,48 @@ func NewErrCRLSignerCertificateVidNotEqualMsgVid(certificateVid int32, messageVi } func NewErrMessageVidNotEqualRootCertVid(vid1 int32, vid2 int32) error { - return sdkerrors.Wrapf(ErrMessageVidNotEqualRootCertVid, + return errors.Wrapf(ErrMessageVidNotEqualRootCertVid, "Message vid=%d is not equal to ledger's root certificate vid=%d", vid1, vid2) } func NewErrCRLSignerCertificatePidNotEqualRevocationPointPid(certificatePid int32, 
revocationPointPid int32) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrCRLSignerCertificatePidNotEqualRevocationPointPid, "CRL Signer Certificate's pid=%d must be equal to the provided pid=%d in the reovocation point", certificatePid, revocationPointPid) } func NewErrCRLSignerCertificateVidNotEqualRevocationPointVid(vid1 int32, vid2 int32) error { - return sdkerrors.Wrapf(ErrCRLSignerCertificateVidNotEqualRevocationPointVid, + return errors.Wrapf(ErrCRLSignerCertificateVidNotEqualRevocationPointVid, "CRL Signer Certificate's vid=%d must be equal to the provided vid=%d in the reovocation point", vid1, vid2) } func NewErrNonEmptyDataDigest() error { - return sdkerrors.Wrapf(ErrEmptyDataFileSize, "Data Digest must be provided only if Data File Size is provided") + return errors.Wrapf(ErrEmptyDataFileSize, "Data Digest must be provided only if Data File Size is provided") } func NewErrNotEmptyDataDigestType() error { - return sdkerrors.Wrapf(ErrNotEmptyDataDigestType, "Data Digest Type must be provided only if Data Digest is provided") + return errors.Wrapf(ErrNotEmptyDataDigestType, "Data Digest Type must be provided only if Data Digest is provided") } func NewErrEmptyDataDigest() error { - return sdkerrors.Wrapf(ErrEmptyDataDigest, "Data Digest must be provided if Data File Size is provided") + return errors.Wrapf(ErrEmptyDataDigest, "Data Digest must be provided if Data File Size is provided") } func NewErrEmptyDataDigestType() error { - return sdkerrors.Wrapf(ErrEmptyDataDigestType, "Data Digest Type must be provided if Data Digest is provided") + return errors.Wrapf(ErrEmptyDataDigestType, "Data Digest Type must be provided if Data Digest is provided") } func NewErrDataFieldPresented(revocationType uint32) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrDataFieldPresented, "Data Digest, Data File Size and Data Digest Type must be omitted for Revocation Type %d", revocationType, ) } func NewErrWrongSubjectKeyIDFormat() error { - return 
sdkerrors.Wrapf( + return errors.Wrapf( ErrWrongSubjectKeyIDFormat, "Wrong SubjectKeyID format. It must consist of even number of uppercase hexadecimal characters ([0-9A-F]), "+ "with no whitespace and no non-hexadecimal characters", @@ -306,7 +308,7 @@ func NewErrWrongSubjectKeyIDFormat() error { } func NewErrWrongIssuerSubjectKeyIDFormat() error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrWrongSubjectKeyIDFormat, "Wrong IssuerSubjectKeyID format. It must consist of even number of uppercase hexadecimal characters ([0-9A-F]), "+ "with no whitespace and no non-hexadecimal characters", @@ -314,7 +316,7 @@ func NewErrWrongIssuerSubjectKeyIDFormat() error { } func NewErrInvalidAuthorityKeyIDFormat() error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrInvalidAuthorityKeyIDFormat, "Invalid AuthorityKeyID format. It must consist of even number of uppercase hexadecimal characters ([0-9A-F]), "+ "with no whitespace and no non-hexadecimal characters", @@ -322,26 +324,25 @@ func NewErrInvalidAuthorityKeyIDFormat() error { } func NewErrVidNotFound(e interface{}) error { - return sdkerrors.Wrapf(ErrVidNotFound, "%v", - e) + return errors.Wrapf(ErrVidNotFound, "%v", e) } func NewErrPidNotFoundInCertificateButProvidedInRevocationPoint() error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrPidNotFound, "Product ID (pid) not found in CRL Signer Certificate when it is provided in the revocation point", ) } func NewErrPidNotFoundInMessage(certificatePid int32) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrPidNotFound, "Product ID (pid) must be provided when pid=%d in non-root CRL Signer Certificate", certificatePid, ) } func NewErrPemValuesNotEqual(subject string, subjectKeyID string) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrPemValuesNotEqual, "PEM values of the CRL signer certificate and a certificate found by subject=%s and subjectKeyID=%s are not equal", subject, subjectKeyID, @@ -349,7 +350,7 @@ func 
NewErrPemValuesNotEqual(subject string, subjectKeyID string) error { } func NewErrPkiRevocationDistributionPointWithVidAndLabelAlreadyExists(vid int32, label string, issuerSubjectKeyID string) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrPkiRevocationDistributionPointAlreadyExists, "PKI revocation distribution point associated with vid=%d and label=%s already exist for issuerSubjectKeyID=%s", vid, label, issuerSubjectKeyID, @@ -357,7 +358,7 @@ func NewErrPkiRevocationDistributionPointWithVidAndLabelAlreadyExists(vid int32, } func NewErrPkiRevocationDistributionPointWithDataURLAlreadyExists(dataURL string, issuerSubjectKeyID string) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrPkiRevocationDistributionPointAlreadyExists, "PKI revocation distribution point associated with dataUrl=%s already exist for issuerSubjectKeyID=%s", dataURL, issuerSubjectKeyID, @@ -365,7 +366,7 @@ func NewErrPkiRevocationDistributionPointWithDataURLAlreadyExists(dataURL string } func NewErrPkiRevocationDistributionPointDoesNotExists(vid int32, label string, issuerSubjectKeyID string) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrPkiRevocationDistributionPointDoesNotExists, "PKI revocation distribution point associated with vid=%d and label=%s does not exist for issuerSubjectKeyID=%s", vid, label, issuerSubjectKeyID, @@ -373,50 +374,50 @@ func NewErrPkiRevocationDistributionPointDoesNotExists(vid int32, label string, } func NewErrMessageVidNotEqualAccountVid(msgVid int32, accountVid int32) error { - return sdkerrors.Wrapf(ErrMessageVidNotEqualAccountVid, "Message vid=%d is not equal to account vid=%d", msgVid, accountVid) + return errors.Wrapf(ErrMessageVidNotEqualAccountVid, "Message vid=%d is not equal to account vid=%d", msgVid, accountVid) } func NewErrMessageExpectedNonRoot(subject string, subjectKeyID string) error { - return sdkerrors.Wrapf(ErrInappropriateCertificateType, "Inappropriate Certificate Type: Certificate with subject=%s and 
subjectKeyID=%s "+ + return errors.Wrapf(ErrInappropriateCertificateType, "Inappropriate Certificate Type: Certificate with subject=%s and subjectKeyID=%s "+ "is a root certificate.", subject, subjectKeyID, ) } func NewErrMessageExistingCertIsNotRoot(subject string, subjectKeyID string) error { - return sdkerrors.Wrapf(ErrInappropriateCertificateType, + return errors.Wrapf(ErrInappropriateCertificateType, "The existing certificate with the same combination of subject (%v) and subjectKeyID (%v) is not a root certificate", subject, subjectKeyID, ) } func NewErrUnsupportedOperation(e interface{}) error { - return sdkerrors.Wrapf(ErrUnsupportedOperation, "%v", e) + return errors.Wrapf(ErrUnsupportedOperation, "%v", e) } func NewErrInvalidVidFormat(e interface{}) error { - return sdkerrors.Wrapf(ErrInvalidVidFormat, "Could not parse vid: %v", e) + return errors.Wrapf(ErrInvalidVidFormat, "Could not parse vid: %v", e) } func NewErrInvalidPidFormat(e interface{}) error { - return sdkerrors.Wrapf(ErrInvalidPidFormat, "Could not parse pid: %v", e) + return errors.Wrapf(ErrInvalidPidFormat, "Could not parse pid: %v", e) } func NewErrInvalidDataURLSchema() error { - return sdkerrors.Wrapf(ErrInvalidDataURLFormat, "Data Url must start with https:// or http://") + return errors.Wrapf(ErrInvalidDataURLFormat, "Data Url must start with https:// or http://") } func NewErrCertificateVidNotEqualMsgVid(e interface{}) error { - return sdkerrors.Wrapf(ErrCertificateVidNotEqualMsgVid, "%v", e) + return errors.Wrapf(ErrCertificateVidNotEqualMsgVid, "%v", e) } func NewErrCertNotChainedBack() error { - return sdkerrors.Wrapf(ErrCertNotChainedBack, "CRL Signer Certificate is not chained back to root certificate on DCL") + return errors.Wrapf(ErrCertNotChainedBack, "CRL Signer Certificate is not chained back to root certificate on DCL") } func NewErrCRLSignerCertNotChainedBackToDelegator() error { - return sdkerrors.Wrapf(ErrCertNotChainedBack, "CRL Signer Certificate is not chained back to 
delegated PAI CRL Signer certificate") + return errors.Wrapf(ErrCertNotChainedBack, "CRL Signer Certificate is not chained back to delegated PAI CRL Signer certificate") } func NewErrCRLSignerCertDelegatorNotChainedBack() error { - return sdkerrors.Wrapf(ErrCertNotChainedBack, "Delegated CRL Signer Certificate is not chained back to root certificate on DCL") + return errors.Wrapf(ErrCertNotChainedBack, "Delegated CRL Signer Certificate is not chained back to root certificate on DCL") } diff --git a/utils/cli/utils.go b/utils/cli/utils.go index fdf9f1677..9eb1ce957 100644 --- a/utils/cli/utils.go +++ b/utils/cli/utils.go @@ -3,15 +3,14 @@ package cli import ( "errors" "fmt" - "io/ioutil" "os" "strings" + rpctypes "github.com/cometbft/cometbft/rpc/jsonrpc/types" "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/client/flags" "github.com/cosmos/cosmos-sdk/codec" "github.com/spf13/cobra" - rpctypes "github.com/tendermint/tendermint/rpc/jsonrpc/types" ) const ( @@ -74,7 +73,7 @@ func QueryWithProofList(clientCtx client.Context, storeName string, keyPrefix st func ReadFromFile(target string) (string, error) { if _, err := os.Stat(target); err == nil { // check whether it is a path - bytes, err := ioutil.ReadFile(target) + bytes, err := os.ReadFile(target) if err != nil { return "", err } @@ -123,7 +122,7 @@ func IsEmptySubtreeRPCError(err error) bool { return false } - return strings.Contains(rpcerror.Message, "Internal error") && strings.Contains(rpcerror.Data, "Nonexistence proof has empty Left and Right proof") + return strings.Contains(rpcerror.Message, "Internal error") && strings.Contains(rpcerror.Data, "nonexistence proof has empty Left and Right proof") } func isEOFError(err error) bool { @@ -140,11 +139,11 @@ func AddTxFlagsToCmd(cmd *cobra.Command) { // TODO there might be a better way how to filter that hiddenFlags := []string{ flags.FlagFees, - flags.FlagFeeAccount, + flags.FlagFeeGranter, flags.FlagGasPrices, flags.FlagGasAdjustment, 
flags.FlagGas, - flags.FlagFeeAccount, + flags.FlagFeePayer, flags.FlagDryRun, // TODO that flag might be actually useful but relates to gas } for _, f := range hiddenFlags { diff --git a/utils/misc.go b/utils/misc.go index e1ff4e3ec..2771a7030 100644 --- a/utils/misc.go +++ b/utils/misc.go @@ -1,13 +1,12 @@ package utils import ( - "io/ioutil" "os" ) func ReadFromFile(target string) (string, error) { if _, err := os.Stat(target); err == nil { // check whether it is a path - bytes, err := ioutil.ReadFile(target) + bytes, err := os.ReadFile(target) if err != nil { return "", err } diff --git a/utils/validator/errors.go b/utils/validator/errors.go index 188301b62..17f14c9ad 100644 --- a/utils/validator/errors.go +++ b/utils/validator/errors.go @@ -15,7 +15,7 @@ package validator import ( - sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "cosmossdk.io/errors" ) const ( @@ -23,10 +23,10 @@ const ( ) var ( - ErrRequiredFieldMissing = sdkerrors.Register(Codespace, 900, "required field missing") - ErrFieldMaxLengthExceeded = sdkerrors.Register(Codespace, 901, "field max length exceeded") - ErrFieldNotValid = sdkerrors.Register(Codespace, 902, "field not valid") - ErrFieldLowerBoundViolated = sdkerrors.Register(Codespace, 903, "field lower bound violated") - ErrFieldUpperBoundViolated = sdkerrors.Register(Codespace, 904, "field upper bound violated") - ErrFieldMinLengthNotReached = sdkerrors.Register(Codespace, 905, "field min length not reached") + ErrRequiredFieldMissing = errors.Register(Codespace, 900, "required field missing") + ErrFieldMaxLengthExceeded = errors.Register(Codespace, 901, "field max length exceeded") + ErrFieldNotValid = errors.Register(Codespace, 902, "field not valid") + ErrFieldLowerBoundViolated = errors.Register(Codespace, 903, "field lower bound violated") + ErrFieldUpperBoundViolated = errors.Register(Codespace, 904, "field upper bound violated") + ErrFieldMinLengthNotReached = errors.Register(Codespace, 905, "field min length not 
reached") ) diff --git a/utils/validator/validator.go b/utils/validator/validator.go index c2a7d554f..7272924eb 100644 --- a/utils/validator/validator.go +++ b/utils/validator/validator.go @@ -18,11 +18,11 @@ import ( "fmt" "strings" - sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "cosmossdk.io/errors" "github.com/go-playground/locales/en" ut "github.com/go-playground/universal-translator" "github.com/go-playground/validator/v10" - en_translations "github.com/go-playground/validator/v10/translations/en" + enTranslations "github.com/go-playground/validator/v10/translations/en" ) var ( @@ -39,7 +39,7 @@ func Validate(s interface{}) error { vl = validator.New() - _ = en_translations.RegisterDefaultTranslations(vl, trans) + _ = enTranslations.RegisterDefaultTranslations(vl, trans) _ = vl.RegisterTranslation("required", trans, func(ut ut.Translator) error { return ut.Add("required", "{0} is a required field", true) // see universal-translator for details @@ -137,27 +137,27 @@ func Validate(s interface{}) error { for _, e := range errs.(validator.ValidationErrors) { if e.Tag() == "required" || e.Tag() == "required_with" || e.Tag() == "required_if" || e.Tag() == "required_unless" { - return sdkerrors.Wrap(ErrRequiredFieldMissing, e.Translate(trans)) + return errors.Wrap(ErrRequiredFieldMissing, e.Translate(trans)) } if e.Tag() == "max" { - return sdkerrors.Wrap(ErrFieldMaxLengthExceeded, e.Translate(trans)) + return errors.Wrap(ErrFieldMaxLengthExceeded, e.Translate(trans)) } if e.Tag() == "min" { - return sdkerrors.Wrap(ErrFieldMinLengthNotReached, e.Translate(trans)) + return errors.Wrap(ErrFieldMinLengthNotReached, e.Translate(trans)) } if e.Tag() == "url" || e.Tag() == "startsnotwith" || e.Tag() == "gtecsfield" { - return sdkerrors.Wrap(ErrFieldNotValid, e.Translate(trans)) + return errors.Wrap(ErrFieldNotValid, e.Translate(trans)) } if e.Tag() == "gte" { - return sdkerrors.Wrap(ErrFieldLowerBoundViolated, e.Translate(trans)) + return 
errors.Wrap(ErrFieldLowerBoundViolated, e.Translate(trans)) } if e.Tag() == "lte" { - return sdkerrors.Wrap(ErrFieldUpperBoundViolated, e.Translate(trans)) + return errors.Wrap(ErrFieldUpperBoundViolated, e.Translate(trans)) } } } diff --git a/vue/package.json b/vue/package.json index 015d4077a..65d69f193 100644 --- a/vue/package.json +++ b/vue/package.json @@ -1,16 +1,5 @@ { - "name": "@starport/template", - "version": "0.1.53", - "description": "A Vue 3 boilerplate project utilizing @starport/vue and @starport/vuex", - "author": "Tendermint, Inc ", - "private": true, - "scripts": { - "serve": "vue-cli-service serve --mode=production", - "dev": "vue-cli-service serve", - "lint": "vue-cli-service lint", - "build": "vue-cli-service build", - "test": "echo \"Error: run tests from root\" && exit 1" - }, + "author": "Tendermint, Inc \u003chello@tendermint.com\u003e", "dependencies": { "@cosmjs/launchpad": "0.25.0", "@cosmjs/proto-signing": "0.25.0", @@ -25,8 +14,10 @@ "vue": "^3.0.11", "vue-router": "^4.0.3", "vue-uuid": "^2.0.2", - "vuex": "^4.0.0" + "vuex": "^4.0.0", + "zigbee-alliance-distributed-compliance-ledger-client-ts": "file:../ts-client" }, + "description": "A Vue 3 boilerplate project utilizing @starport/vue and @starport/vuex", "devDependencies": { "@babel/core": "^7.14.0", "@babel/node": "^7.13.13", @@ -43,5 +34,15 @@ "prettier": "^2.2.1", "protobufjs": "^6.11.2", "sass-loader": "^8.0.2" - } + }, + "name": "@starport/template", + "private": true, + "scripts": { + "build": "vue-cli-service build", + "dev": "vue-cli-service serve", + "lint": "vue-cli-service lint", + "serve": "vue-cli-service serve --mode=production", + "test": "echo \"Error: run tests from root\" \u0026\u0026 exit 1" + }, + "version": "0.1.53" } diff --git a/vue/src/composables/useClient.ts b/vue/src/composables/useClient.ts new file mode 100755 index 000000000..83e561d9c --- /dev/null +++ b/vue/src/composables/useClient.ts @@ -0,0 +1,15 @@ +import { Client } from 
'zigbee-alliance-distributed-compliance-ledger-client-ts' +import { env } from '../env'; + +const useClientInstance = () => { + const client = new Client(env); + return client; +}; +let clientInstance: ReturnType; + +export const useClient = () => { + if (!clientInstance) { + clientInstance = useClientInstance(); + } + return clientInstance; +}; \ No newline at end of file diff --git a/vue/src/composables/useCosmosAuthV1Beta1/index.ts b/vue/src/composables/useCosmosAuthV1Beta1/index.ts new file mode 100755 index 000000000..0e4a3defa --- /dev/null +++ b/vue/src/composables/useCosmosAuthV1Beta1/index.ts @@ -0,0 +1,95 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import { useQuery, type UseQueryOptions, useInfiniteQuery, type UseInfiniteQueryOptions } from "@tanstack/vue-query"; +import { useClient } from '../useClient'; +import type { Ref } from 'vue' + +export default function useCosmosAuthV1Beta1() { + const client = useClient(); + const QueryAccounts = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryAccounts', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.CosmosAuthV1Beta1.query.queryAccounts(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 
0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryAccount = (address: string, options: any) => { + const key = { type: 'QueryAccount', address }; + return useQuery([key], () => { + const { address } = key + return client.CosmosAuthV1Beta1.query.queryAccount(address).then( res => res.data ); + }, options); + } + + const QueryAccountAddressByID = (id: string, query: any, options: any) => { + const key = { type: 'QueryAccountAddressByID', id, query }; + return useQuery([key], () => { + const { id,query } = key + return client.CosmosAuthV1Beta1.query.queryAccountAddressByID(id, query ?? undefined).then( res => res.data ); + }, options); + } + + const QueryParams = ( options: any) => { + const key = { type: 'QueryParams', }; + return useQuery([key], () => { + return client.CosmosAuthV1Beta1.query.queryParams().then( res => res.data ); + }, options); + } + + const QueryModuleAccounts = ( options: any) => { + const key = { type: 'QueryModuleAccounts', }; + return useQuery([key], () => { + return client.CosmosAuthV1Beta1.query.queryModuleAccounts().then( res => res.data ); + }, options); + } + + const QueryModuleAccountByName = (name: string, options: any) => { + const key = { type: 'QueryModuleAccountByName', name }; + return useQuery([key], () => { + const { name } = key + return client.CosmosAuthV1Beta1.query.queryModuleAccountByName(name).then( res => res.data ); + }, options); + } + + const QueryBech32Prefix = ( options: any) => { + const key = { type: 'QueryBech32Prefix', }; + return useQuery([key], () => { + return client.CosmosAuthV1Beta1.query.queryBech32Prefix().then( res => res.data ); + }, options); + } + + const QueryAddressBytesToString = (address_bytes: string, options: any) => { + const key = { type: 'QueryAddressBytesToString', address_bytes }; + return useQuery([key], () 
=> { + const { address_bytes } = key + return client.CosmosAuthV1Beta1.query.queryAddressBytesToString(address_bytes).then( res => res.data ); + }, options); + } + + const QueryAddressStringToBytes = (address_string: string, options: any) => { + const key = { type: 'QueryAddressStringToBytes', address_string }; + return useQuery([key], () => { + const { address_string } = key + return client.CosmosAuthV1Beta1.query.queryAddressStringToBytes(address_string).then( res => res.data ); + }, options); + } + + const QueryAccountInfo = (address: string, options: any) => { + const key = { type: 'QueryAccountInfo', address }; + return useQuery([key], () => { + const { address } = key + return client.CosmosAuthV1Beta1.query.queryAccountInfo(address).then( res => res.data ); + }, options); + } + + return {QueryAccounts,QueryAccount,QueryAccountAddressByID,QueryParams,QueryModuleAccounts,QueryModuleAccountByName,QueryBech32Prefix,QueryAddressBytesToString,QueryAddressStringToBytes,QueryAccountInfo, + } +} \ No newline at end of file diff --git a/vue/src/composables/useCosmosBaseTendermintV1Beta1/index.ts b/vue/src/composables/useCosmosBaseTendermintV1Beta1/index.ts new file mode 100755 index 000000000..848c81053 --- /dev/null +++ b/vue/src/composables/useCosmosBaseTendermintV1Beta1/index.ts @@ -0,0 +1,79 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import { useQuery, type UseQueryOptions, useInfiniteQuery, type UseInfiniteQueryOptions } from "@tanstack/vue-query"; +import { useClient } from '../useClient'; +import type { Ref } from 'vue' + +export default function useCosmosBaseTendermintV1Beta1() { + const client = useClient(); + const ServiceGetNodeInfo = ( options: any) => { + const key = { type: 'ServiceGetNodeInfo', }; + return useQuery([key], () => { + return client.CosmosBaseTendermintV1Beta1.query.serviceGetNodeInfo().then( res => res.data ); + }, options); + } + + const ServiceGetSyncing = ( options: any) => { + const key = { type: 'ServiceGetSyncing', }; 
+ return useQuery([key], () => { + return client.CosmosBaseTendermintV1Beta1.query.serviceGetSyncing().then( res => res.data ); + }, options); + } + + const ServiceGetLatestBlock = ( options: any) => { + const key = { type: 'ServiceGetLatestBlock', }; + return useQuery([key], () => { + return client.CosmosBaseTendermintV1Beta1.query.serviceGetLatestBlock().then( res => res.data ); + }, options); + } + + const ServiceGetBlockByHeight = (height: string, options: any) => { + const key = { type: 'ServiceGetBlockByHeight', height }; + return useQuery([key], () => { + const { height } = key + return client.CosmosBaseTendermintV1Beta1.query.serviceGetBlockByHeight(height).then( res => res.data ); + }, options); + } + + const ServiceGetLatestValidatorSet = (query: any, options: any, perPage: number) => { + const key = { type: 'ServiceGetLatestValidatorSet', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.CosmosBaseTendermintV1Beta1.query.serviceGetLatestValidatorSet(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 
0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const ServiceGetValidatorSetByHeight = (height: string, query: any, options: any, perPage: number) => { + const key = { type: 'ServiceGetValidatorSetByHeight', height, query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const { height,query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.CosmosBaseTendermintV1Beta1.query.serviceGetValidatorSetByHeight(height, query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const ServiceABCIQuery = (query: any, options: any) => { + const key = { type: 'ServiceABCIQuery', query }; + return useQuery([key], () => { + const {query } = key + return client.CosmosBaseTendermintV1Beta1.query.serviceABCIQuery(query ?? 
undefined).then( res => res.data ); + }, options); + } + + return {ServiceGetNodeInfo,ServiceGetSyncing,ServiceGetLatestBlock,ServiceGetBlockByHeight,ServiceGetLatestValidatorSet,ServiceGetValidatorSetByHeight,ServiceABCIQuery, + } +} \ No newline at end of file diff --git a/vue/src/composables/useCosmosConsensusV1/index.ts b/vue/src/composables/useCosmosConsensusV1/index.ts new file mode 100755 index 000000000..37763b968 --- /dev/null +++ b/vue/src/composables/useCosmosConsensusV1/index.ts @@ -0,0 +1,17 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import { useQuery, type UseQueryOptions, useInfiniteQuery, type UseInfiniteQueryOptions } from "@tanstack/vue-query"; +import { useClient } from '../useClient'; +import type { Ref } from 'vue' + +export default function useCosmosConsensusV1() { + const client = useClient(); + const QueryParams = ( options: any) => { + const key = { type: 'QueryParams', }; + return useQuery([key], () => { + return client.CosmosConsensusV1.query.queryParams().then( res => res.data ); + }, options); + } + + return {QueryParams, + } +} \ No newline at end of file diff --git a/vue/src/composables/useCosmosParamsV1Beta1/index.ts b/vue/src/composables/useCosmosParamsV1Beta1/index.ts new file mode 100755 index 000000000..0100c4f7a --- /dev/null +++ b/vue/src/composables/useCosmosParamsV1Beta1/index.ts @@ -0,0 +1,25 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import { useQuery, type UseQueryOptions, useInfiniteQuery, type UseInfiniteQueryOptions } from "@tanstack/vue-query"; +import { useClient } from '../useClient'; +import type { Ref } from 'vue' + +export default function useCosmosParamsV1Beta1() { + const client = useClient(); + const QueryParams = (query: any, options: any) => { + const key = { type: 'QueryParams', query }; + return useQuery([key], () => { + const {query } = key + return client.CosmosParamsV1Beta1.query.queryParams(query ?? 
undefined).then( res => res.data ); + }, options); + } + + const QuerySubspaces = ( options: any) => { + const key = { type: 'QuerySubspaces', }; + return useQuery([key], () => { + return client.CosmosParamsV1Beta1.query.querySubspaces().then( res => res.data ); + }, options); + } + + return {QueryParams,QuerySubspaces, + } +} \ No newline at end of file diff --git a/vue/src/composables/useCosmosTxV1Beta1/index.ts b/vue/src/composables/useCosmosTxV1Beta1/index.ts new file mode 100755 index 000000000..5d4848958 --- /dev/null +++ b/vue/src/composables/useCosmosTxV1Beta1/index.ts @@ -0,0 +1,92 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import { useQuery, type UseQueryOptions, useInfiniteQuery, type UseInfiniteQueryOptions } from "@tanstack/vue-query"; +import { useClient } from '../useClient'; +import type { Ref } from 'vue' + +export default function useCosmosTxV1Beta1() { + const client = useClient(); + const ServiceSimulate = ( options: any) => { + const key = { type: 'ServiceSimulate', }; + return useQuery([key], () => { + return client.CosmosTxV1Beta1.query.serviceSimulate({...key}).then( res => res.data ); + }, options); + } + + const ServiceGetTx = (hash: string, options: any) => { + const key = { type: 'ServiceGetTx', hash }; + return useQuery([key], () => { + const { hash } = key + return client.CosmosTxV1Beta1.query.serviceGetTx(hash).then( res => res.data ); + }, options); + } + + const ServiceBroadcastTx = ( options: any) => { + const key = { type: 'ServiceBroadcastTx', }; + return useQuery([key], () => { + return client.CosmosTxV1Beta1.query.serviceBroadcastTx({...key}).then( res => res.data ); + }, options); + } + + const ServiceGetTxsEvent = (query: any, options: any, perPage: number) => { + const key = { type: 'ServiceGetTxsEvent', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + 
query['pagination.count_total']= true; + return client.CosmosTxV1Beta1.query.serviceGetTxsEvent(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const ServiceGetBlockWithTxs = (height: string, query: any, options: any, perPage: number) => { + const key = { type: 'ServiceGetBlockWithTxs', height, query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const { height,query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.CosmosTxV1Beta1.query.serviceGetBlockWithTxs(height, query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 
0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const ServiceTxDecode = ( options: any) => { + const key = { type: 'ServiceTxDecode', }; + return useQuery([key], () => { + return client.CosmosTxV1Beta1.query.serviceTxDecode({...key}).then( res => res.data ); + }, options); + } + + const ServiceTxEncode = ( options: any) => { + const key = { type: 'ServiceTxEncode', }; + return useQuery([key], () => { + return client.CosmosTxV1Beta1.query.serviceTxEncode({...key}).then( res => res.data ); + }, options); + } + + const ServiceTxEncodeAmino = ( options: any) => { + const key = { type: 'ServiceTxEncodeAmino', }; + return useQuery([key], () => { + return client.CosmosTxV1Beta1.query.serviceTxEncodeAmino({...key}).then( res => res.data ); + }, options); + } + + const ServiceTxDecodeAmino = ( options: any) => { + const key = { type: 'ServiceTxDecodeAmino', }; + return useQuery([key], () => { + return client.CosmosTxV1Beta1.query.serviceTxDecodeAmino({...key}).then( res => res.data ); + }, options); + } + + return {ServiceSimulate,ServiceGetTx,ServiceBroadcastTx,ServiceGetTxsEvent,ServiceGetBlockWithTxs,ServiceTxDecode,ServiceTxEncode,ServiceTxEncodeAmino,ServiceTxDecodeAmino, + } +} \ No newline at end of file diff --git a/vue/src/composables/useCosmosUpgradeV1Beta1/index.ts b/vue/src/composables/useCosmosUpgradeV1Beta1/index.ts new file mode 100755 index 000000000..df01c795a --- /dev/null +++ b/vue/src/composables/useCosmosUpgradeV1Beta1/index.ts @@ -0,0 +1,48 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import { useQuery, type UseQueryOptions, useInfiniteQuery, type UseInfiniteQueryOptions } from "@tanstack/vue-query"; +import { useClient } from '../useClient'; +import type { Ref } from 'vue' + +export default function useCosmosUpgradeV1Beta1() { + const client = useClient(); + 
const QueryCurrentPlan = ( options: any) => { + const key = { type: 'QueryCurrentPlan', }; + return useQuery([key], () => { + return client.CosmosUpgradeV1Beta1.query.queryCurrentPlan().then( res => res.data ); + }, options); + } + + const QueryAppliedPlan = (name: string, options: any) => { + const key = { type: 'QueryAppliedPlan', name }; + return useQuery([key], () => { + const { name } = key + return client.CosmosUpgradeV1Beta1.query.queryAppliedPlan(name).then( res => res.data ); + }, options); + } + + const QueryUpgradedConsensusState = (last_height: string, options: any) => { + const key = { type: 'QueryUpgradedConsensusState', last_height }; + return useQuery([key], () => { + const { last_height } = key + return client.CosmosUpgradeV1Beta1.query.queryUpgradedConsensusState(last_height).then( res => res.data ); + }, options); + } + + const QueryModuleVersions = (query: any, options: any) => { + const key = { type: 'QueryModuleVersions', query }; + return useQuery([key], () => { + const {query } = key + return client.CosmosUpgradeV1Beta1.query.queryModuleVersions(query ?? 
undefined).then( res => res.data ); + }, options); + } + + const QueryAuthority = ( options: any) => { + const key = { type: 'QueryAuthority', }; + return useQuery([key], () => { + return client.CosmosUpgradeV1Beta1.query.queryAuthority().then( res => res.data ); + }, options); + } + + return {QueryCurrentPlan,QueryAppliedPlan,QueryUpgradedConsensusState,QueryModuleVersions,QueryAuthority, + } +} \ No newline at end of file diff --git a/vue/src/composables/useCosmosVestingV1Beta1/index.ts b/vue/src/composables/useCosmosVestingV1Beta1/index.ts new file mode 100755 index 000000000..06ef4c3a8 --- /dev/null +++ b/vue/src/composables/useCosmosVestingV1Beta1/index.ts @@ -0,0 +1,10 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import { useQuery, type UseQueryOptions, useInfiniteQuery, type UseInfiniteQueryOptions } from "@tanstack/vue-query"; +import { useClient } from '../useClient'; +import type { Ref } from 'vue' + +export default function useCosmosVestingV1Beta1() { + const client = useClient(); + return { + } +} \ No newline at end of file diff --git a/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerCompliance/index.ts b/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerCompliance/index.ts new file mode 100755 index 000000000..bd040a4c4 --- /dev/null +++ b/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerCompliance/index.ts @@ -0,0 +1,130 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import { useQuery, type UseQueryOptions, useInfiniteQuery, type UseInfiniteQueryOptions } from "@tanstack/vue-query"; +import { useClient } from '../useClient'; +import type { Ref } from 'vue' + +export default function useZigbeeallianceDistributedcomplianceledgerCompliance() { + const client = useClient(); + const QueryComplianceInfo = (vid: string, pid: string, softwareVersion: string, certificationType: string, options: any) => { + const key = { type: 'QueryComplianceInfo', vid, pid, softwareVersion, 
certificationType }; + return useQuery([key], () => { + const { vid, pid, softwareVersion, certificationType } = key + return client.ZigbeeallianceDistributedcomplianceledgerCompliance.query.queryComplianceInfo(vid, pid, softwareVersion, certificationType).then( res => res.data ); + }, options); + } + + const QueryComplianceInfoAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryComplianceInfoAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerCompliance.query.queryComplianceInfoAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryCertifiedModel = (vid: string, pid: string, softwareVersion: string, certificationType: string, options: any) => { + const key = { type: 'QueryCertifiedModel', vid, pid, softwareVersion, certificationType }; + return useQuery([key], () => { + const { vid, pid, softwareVersion, certificationType } = key + return client.ZigbeeallianceDistributedcomplianceledgerCompliance.query.queryCertifiedModel(vid, pid, softwareVersion, certificationType).then( res => res.data ); + }, options); + } + + const QueryCertifiedModelAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryCertifiedModelAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + 
query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerCompliance.query.queryCertifiedModelAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryRevokedModel = (vid: string, pid: string, softwareVersion: string, certificationType: string, options: any) => { + const key = { type: 'QueryRevokedModel', vid, pid, softwareVersion, certificationType }; + return useQuery([key], () => { + const { vid, pid, softwareVersion, certificationType } = key + return client.ZigbeeallianceDistributedcomplianceledgerCompliance.query.queryRevokedModel(vid, pid, softwareVersion, certificationType).then( res => res.data ); + }, options); + } + + const QueryRevokedModelAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryRevokedModelAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerCompliance.query.queryRevokedModelAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 
0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryProvisionalModel = (vid: string, pid: string, softwareVersion: string, certificationType: string, options: any) => { + const key = { type: 'QueryProvisionalModel', vid, pid, softwareVersion, certificationType }; + return useQuery([key], () => { + const { vid, pid, softwareVersion, certificationType } = key + return client.ZigbeeallianceDistributedcomplianceledgerCompliance.query.queryProvisionalModel(vid, pid, softwareVersion, certificationType).then( res => res.data ); + }, options); + } + + const QueryProvisionalModelAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryProvisionalModelAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerCompliance.query.queryProvisionalModelAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 
0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryDeviceSoftwareCompliance = (cDCertificateId: string, options: any) => { + const key = { type: 'QueryDeviceSoftwareCompliance', cDCertificateId }; + return useQuery([key], () => { + const { cDCertificateId } = key + return client.ZigbeeallianceDistributedcomplianceledgerCompliance.query.queryDeviceSoftwareCompliance(cDCertificateId).then( res => res.data ); + }, options); + } + + const QueryDeviceSoftwareComplianceAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryDeviceSoftwareComplianceAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerCompliance.query.queryDeviceSoftwareComplianceAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 
0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + return {QueryComplianceInfo,QueryComplianceInfoAll,QueryCertifiedModel,QueryCertifiedModelAll,QueryRevokedModel,QueryRevokedModelAll,QueryProvisionalModel,QueryProvisionalModelAll,QueryDeviceSoftwareCompliance,QueryDeviceSoftwareComplianceAll, + } +} \ No newline at end of file diff --git a/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerDclauth/index.ts b/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerDclauth/index.ts new file mode 100755 index 000000000..c97c81510 --- /dev/null +++ b/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerDclauth/index.ts @@ -0,0 +1,137 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import { useQuery, type UseQueryOptions, useInfiniteQuery, type UseInfiniteQueryOptions } from "@tanstack/vue-query"; +import { useClient } from '../useClient'; +import type { Ref } from 'vue' + +export default function useZigbeeallianceDistributedcomplianceledgerDclauth() { + const client = useClient(); + const QueryAccount = (address: string, options: any) => { + const key = { type: 'QueryAccount', address }; + return useQuery([key], () => { + const { address } = key + return client.ZigbeeallianceDistributedcomplianceledgerDclauth.query.queryAccount(address).then( res => res.data ); + }, options); + } + + const QueryAccountAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryAccountAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerDclauth.query.queryAccountAll(query ?? 
undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryPendingAccount = (address: string, options: any) => { + const key = { type: 'QueryPendingAccount', address }; + return useQuery([key], () => { + const { address } = key + return client.ZigbeeallianceDistributedcomplianceledgerDclauth.query.queryPendingAccount(address).then( res => res.data ); + }, options); + } + + const QueryPendingAccountAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryPendingAccountAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerDclauth.query.queryPendingAccountAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 
0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryPendingAccountRevocation = (address: string, options: any) => { + const key = { type: 'QueryPendingAccountRevocation', address }; + return useQuery([key], () => { + const { address } = key + return client.ZigbeeallianceDistributedcomplianceledgerDclauth.query.queryPendingAccountRevocation(address).then( res => res.data ); + }, options); + } + + const QueryPendingAccountRevocationAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryPendingAccountRevocationAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerDclauth.query.queryPendingAccountRevocationAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 
0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryAccountStat = ( options: any) => { + const key = { type: 'QueryAccountStat', }; + return useQuery([key], () => { + return client.ZigbeeallianceDistributedcomplianceledgerDclauth.query.queryAccountStat().then( res => res.data ); + }, options); + } + + const QueryRevokedAccount = (address: string, options: any) => { + const key = { type: 'QueryRevokedAccount', address }; + return useQuery([key], () => { + const { address } = key + return client.ZigbeeallianceDistributedcomplianceledgerDclauth.query.queryRevokedAccount(address).then( res => res.data ); + }, options); + } + + const QueryRevokedAccountAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryRevokedAccountAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerDclauth.query.queryRevokedAccountAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 
0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryRejectedAccount = (address: string, options: any) => { + const key = { type: 'QueryRejectedAccount', address }; + return useQuery([key], () => { + const { address } = key + return client.ZigbeeallianceDistributedcomplianceledgerDclauth.query.queryRejectedAccount(address).then( res => res.data ); + }, options); + } + + const QueryRejectedAccountAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryRejectedAccountAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerDclauth.query.queryRejectedAccountAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 
0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + return {QueryAccount,QueryAccountAll,QueryPendingAccount,QueryPendingAccountAll,QueryPendingAccountRevocation,QueryPendingAccountRevocationAll,QueryAccountStat,QueryRevokedAccount,QueryRevokedAccountAll,QueryRejectedAccount,QueryRejectedAccountAll, + } +} \ No newline at end of file diff --git a/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerDclgenutil/index.ts b/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerDclgenutil/index.ts new file mode 100755 index 000000000..9193a2b4a --- /dev/null +++ b/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerDclgenutil/index.ts @@ -0,0 +1,10 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import { useQuery, type UseQueryOptions, useInfiniteQuery, type UseInfiniteQueryOptions } from "@tanstack/vue-query"; +import { useClient } from '../useClient'; +import type { Ref } from 'vue' + +export default function useZigbeeallianceDistributedcomplianceledgerDclgenutil() { + const client = useClient(); + return { + } +} \ No newline at end of file diff --git a/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerDclupgrade/index.ts b/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerDclupgrade/index.ts new file mode 100755 index 000000000..6b0b8edc2 --- /dev/null +++ b/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerDclupgrade/index.ts @@ -0,0 +1,82 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import { useQuery, type UseQueryOptions, useInfiniteQuery, type UseInfiniteQueryOptions } from "@tanstack/vue-query"; +import { useClient } from '../useClient'; +import type { Ref } from 'vue' + +export default function useZigbeeallianceDistributedcomplianceledgerDclupgrade() { + const client = useClient(); + 
const QueryProposedUpgrade = (name: string, options: any) => { + const key = { type: 'QueryProposedUpgrade', name }; + return useQuery([key], () => { + const { name } = key + return client.ZigbeeallianceDistributedcomplianceledgerDclupgrade.query.queryProposedUpgrade(name).then( res => res.data ); + }, options); + } + + const QueryProposedUpgradeAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryProposedUpgradeAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerDclupgrade.query.queryProposedUpgradeAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryApprovedUpgrade = (name: string, options: any) => { + const key = { type: 'QueryApprovedUpgrade', name }; + return useQuery([key], () => { + const { name } = key + return client.ZigbeeallianceDistributedcomplianceledgerDclupgrade.query.queryApprovedUpgrade(name).then( res => res.data ); + }, options); + } + + const QueryApprovedUpgradeAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryApprovedUpgradeAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return 
client.ZigbeeallianceDistributedcomplianceledgerDclupgrade.query.queryApprovedUpgradeAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryRejectedUpgrade = (name: string, options: any) => { + const key = { type: 'QueryRejectedUpgrade', name }; + return useQuery([key], () => { + const { name } = key + return client.ZigbeeallianceDistributedcomplianceledgerDclupgrade.query.queryRejectedUpgrade(name).then( res => res.data ); + }, options); + } + + const QueryRejectedUpgradeAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryRejectedUpgradeAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerDclupgrade.query.queryRejectedUpgradeAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 
0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + return {QueryProposedUpgrade,QueryProposedUpgradeAll,QueryApprovedUpgrade,QueryApprovedUpgradeAll,QueryRejectedUpgrade,QueryRejectedUpgradeAll, + } +} \ No newline at end of file diff --git a/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerModel/index.ts b/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerModel/index.ts new file mode 100755 index 000000000..1c581eadc --- /dev/null +++ b/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerModel/index.ts @@ -0,0 +1,58 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import { useQuery, type UseQueryOptions, useInfiniteQuery, type UseInfiniteQueryOptions } from "@tanstack/vue-query"; +import { useClient } from '../useClient'; +import type { Ref } from 'vue' + +export default function useZigbeeallianceDistributedcomplianceledgerModel() { + const client = useClient(); + const QueryVendorProducts = (vid: string, options: any) => { + const key = { type: 'QueryVendorProducts', vid }; + return useQuery([key], () => { + const { vid } = key + return client.ZigbeeallianceDistributedcomplianceledgerModel.query.queryVendorProducts(vid).then( res => res.data ); + }, options); + } + + const QueryModel = (vid: string, pid: string, options: any) => { + const key = { type: 'QueryModel', vid, pid }; + return useQuery([key], () => { + const { vid, pid } = key + return client.ZigbeeallianceDistributedcomplianceledgerModel.query.queryModel(vid, pid).then( res => res.data ); + }, options); + } + + const QueryModelAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryModelAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + 
query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerModel.query.queryModelAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryModelVersion = (vid: string, pid: string, softwareVersion: string, options: any) => { + const key = { type: 'QueryModelVersion', vid, pid, softwareVersion }; + return useQuery([key], () => { + const { vid, pid, softwareVersion } = key + return client.ZigbeeallianceDistributedcomplianceledgerModel.query.queryModelVersion(vid, pid, softwareVersion).then( res => res.data ); + }, options); + } + + const QueryModelVersions = (vid: string, pid: string, options: any) => { + const key = { type: 'QueryModelVersions', vid, pid }; + return useQuery([key], () => { + const { vid, pid } = key + return client.ZigbeeallianceDistributedcomplianceledgerModel.query.queryModelVersions(vid, pid).then( res => res.data ); + }, options); + } + + return {QueryVendorProducts,QueryModel,QueryModelAll,QueryModelVersion,QueryModelVersions, + } +} \ No newline at end of file diff --git a/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerPki/index.ts b/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerPki/index.ts new file mode 100755 index 000000000..0b64716ab --- /dev/null +++ b/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerPki/index.ts @@ -0,0 +1,272 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import { useQuery, type UseQueryOptions, useInfiniteQuery, type UseInfiniteQueryOptions } from "@tanstack/vue-query"; +import { useClient } from 
'../useClient'; +import type { Ref } from 'vue' + +export default function useZigbeeallianceDistributedcomplianceledgerPki() { + const client = useClient(); + const QueryApprovedCertificates = (subject: string, subjectKeyId: string, options: any) => { + const key = { type: 'QueryApprovedCertificates', subject, subjectKeyId }; + return useQuery([key], () => { + const { subject, subjectKeyId } = key + return client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryApprovedCertificates(subject, subjectKeyId).then( res => res.data ); + }, options); + } + + const QueryApprovedCertificatesAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryApprovedCertificatesAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryApprovedCertificatesAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 
0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryProposedCertificate = (subject: string, subjectKeyId: string, options: any) => { + const key = { type: 'QueryProposedCertificate', subject, subjectKeyId }; + return useQuery([key], () => { + const { subject, subjectKeyId } = key + return client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryProposedCertificate(subject, subjectKeyId).then( res => res.data ); + }, options); + } + + const QueryProposedCertificateAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryProposedCertificateAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryProposedCertificateAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 
0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryChildCertificates = (issuer: string, authorityKeyId: string, options: any) => { + const key = { type: 'QueryChildCertificates', issuer, authorityKeyId }; + return useQuery([key], () => { + const { issuer, authorityKeyId } = key + return client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryChildCertificates(issuer, authorityKeyId).then( res => res.data ); + }, options); + } + + const QueryProposedCertificateRevocation = (subject: string, subjectKeyId: string, query: any, options: any) => { + const key = { type: 'QueryProposedCertificateRevocation', subject, subjectKeyId, query }; + return useQuery([key], () => { + const { subject, subjectKeyId,query } = key + return client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryProposedCertificateRevocation(subject, subjectKeyId, query ?? undefined).then( res => res.data ); + }, options); + } + + const QueryProposedCertificateRevocationAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryProposedCertificateRevocationAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryProposedCertificateRevocationAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 
0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryRevokedCertificates = (subject: string, subjectKeyId: string, options: any) => { + const key = { type: 'QueryRevokedCertificates', subject, subjectKeyId }; + return useQuery([key], () => { + const { subject, subjectKeyId } = key + return client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryRevokedCertificates(subject, subjectKeyId).then( res => res.data ); + }, options); + } + + const QueryRevokedCertificatesAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryRevokedCertificatesAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryRevokedCertificatesAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 
0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryApprovedRootCertificates = ( options: any) => { + const key = { type: 'QueryApprovedRootCertificates', }; + return useQuery([key], () => { + return client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryApprovedRootCertificates().then( res => res.data ); + }, options); + } + + const QueryRevokedRootCertificates = ( options: any) => { + const key = { type: 'QueryRevokedRootCertificates', }; + return useQuery([key], () => { + return client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryRevokedRootCertificates().then( res => res.data ); + }, options); + } + + const QueryApprovedCertificatesBySubject = (subject: string, options: any) => { + const key = { type: 'QueryApprovedCertificatesBySubject', subject }; + return useQuery([key], () => { + const { subject } = key + return client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryApprovedCertificatesBySubject(subject).then( res => res.data ); + }, options); + } + + const QueryRejectedCertificate = (subject: string, subjectKeyId: string, options: any) => { + const key = { type: 'QueryRejectedCertificate', subject, subjectKeyId }; + return useQuery([key], () => { + const { subject, subjectKeyId } = key + return client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryRejectedCertificate(subject, subjectKeyId).then( res => res.data ); + }, options); + } + + const QueryRejectedCertificateAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryRejectedCertificateAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return 
client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryRejectedCertificateAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryPkiRevocationDistributionPoint = (issuerSubjectKeyID: string, vid: string, label: string, options: any) => { + const key = { type: 'QueryPkiRevocationDistributionPoint', issuerSubjectKeyID, vid, label }; + return useQuery([key], () => { + const { issuerSubjectKeyID, vid, label } = key + return client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryPkiRevocationDistributionPoint(issuerSubjectKeyID, vid, label).then( res => res.data ); + }, options); + } + + const QueryPkiRevocationDistributionPointAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryPkiRevocationDistributionPointAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryPkiRevocationDistributionPointAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 
0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryPkiRevocationDistributionPointsByIssuerSubjectKeyID = (issuerSubjectKeyID: string, options: any) => { + const key = { type: 'QueryPkiRevocationDistributionPointsByIssuerSubjectKeyID', issuerSubjectKeyID }; + return useQuery([key], () => { + const { issuerSubjectKeyID } = key + return client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryPkiRevocationDistributionPointsByIssuerSubjectKeyID(issuerSubjectKeyID).then( res => res.data ); + }, options); + } + + const QueryNocRootCertificates = (vid: string, options: any) => { + const key = { type: 'QueryNocRootCertificates', vid }; + return useQuery([key], () => { + const { vid } = key + return client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryNocRootCertificates(vid).then( res => res.data ); + }, options); + } + + const QueryNocRootCertificatesByVidAndSkid = (vid: string, subjectKeyId: string, options: any) => { + const key = { type: 'QueryNocRootCertificatesByVidAndSkid', vid, subjectKeyId }; + return useQuery([key], () => { + const { vid, subjectKeyId } = key + return client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryNocRootCertificatesByVidAndSkid(vid, subjectKeyId).then( res => res.data ); + }, options); + } + + const QueryNocRootCertificatesAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryNocRootCertificatesAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryNocRootCertificatesAll(query ?? 
undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryNocIcaCertificates = (vid: string, options: any) => { + const key = { type: 'QueryNocIcaCertificates', vid }; + return useQuery([key], () => { + const { vid } = key + return client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryNocIcaCertificates(vid).then( res => res.data ); + }, options); + } + + const QueryNocIcaCertificatesAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryNocIcaCertificatesAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryNocIcaCertificatesAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 
0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryRevokedNocRootCertificates = (subject: string, subjectKeyId: string, options: any) => { + const key = { type: 'QueryRevokedNocRootCertificates', subject, subjectKeyId }; + return useQuery([key], () => { + const { subject, subjectKeyId } = key + return client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryRevokedNocRootCertificates(subject, subjectKeyId).then( res => res.data ); + }, options); + } + + const QueryRevokedNocRootCertificatesAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryRevokedNocRootCertificatesAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerPki.query.queryRevokedNocRootCertificatesAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 
0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + return {QueryApprovedCertificates,QueryApprovedCertificatesAll,QueryProposedCertificate,QueryProposedCertificateAll,QueryChildCertificates,QueryProposedCertificateRevocation,QueryProposedCertificateRevocationAll,QueryRevokedCertificates,QueryRevokedCertificatesAll,QueryApprovedRootCertificates,QueryRevokedRootCertificates,QueryApprovedCertificatesBySubject,QueryRejectedCertificate,QueryRejectedCertificateAll,QueryPkiRevocationDistributionPoint,QueryPkiRevocationDistributionPointAll,QueryPkiRevocationDistributionPointsByIssuerSubjectKeyID,QueryNocRootCertificates,QueryNocRootCertificatesByVidAndSkid,QueryNocRootCertificatesAll,QueryNocIcaCertificates,QueryNocIcaCertificatesAll,QueryRevokedNocRootCertificates,QueryRevokedNocRootCertificatesAll, + } +} \ No newline at end of file diff --git a/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerValidator/index.ts b/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerValidator/index.ts new file mode 100755 index 000000000..47a54c4d7 --- /dev/null +++ b/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerValidator/index.ts @@ -0,0 +1,130 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import { useQuery, type UseQueryOptions, useInfiniteQuery, type UseInfiniteQueryOptions } from "@tanstack/vue-query"; +import { useClient } from '../useClient'; +import type { Ref } from 'vue' + +export default function useZigbeeallianceDistributedcomplianceledgerValidator() { + const client = useClient(); + const QueryValidator = (owner: string, options: any) => { + const key = { type: 'QueryValidator', owner }; + return useQuery([key], () => { + const { owner } = key + return client.ZigbeeallianceDistributedcomplianceledgerValidator.query.queryValidator(owner).then( 
res => res.data ); + }, options); + } + + const QueryValidatorAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryValidatorAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerValidator.query.queryValidatorAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryLastValidatorPower = (owner: string, options: any) => { + const key = { type: 'QueryLastValidatorPower', owner }; + return useQuery([key], () => { + const { owner } = key + return client.ZigbeeallianceDistributedcomplianceledgerValidator.query.queryLastValidatorPower(owner).then( res => res.data ); + }, options); + } + + const QueryLastValidatorPowerAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryLastValidatorPowerAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerValidator.query.queryLastValidatorPowerAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 
0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryProposedDisableValidator = (address: string, options: any) => { + const key = { type: 'QueryProposedDisableValidator', address }; + return useQuery([key], () => { + const { address } = key + return client.ZigbeeallianceDistributedcomplianceledgerValidator.query.queryProposedDisableValidator(address).then( res => res.data ); + }, options); + } + + const QueryProposedDisableValidatorAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryProposedDisableValidatorAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerValidator.query.queryProposedDisableValidatorAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 
0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryDisabledValidator = (address: string, options: any) => { + const key = { type: 'QueryDisabledValidator', address }; + return useQuery([key], () => { + const { address } = key + return client.ZigbeeallianceDistributedcomplianceledgerValidator.query.queryDisabledValidator(address).then( res => res.data ); + }, options); + } + + const QueryDisabledValidatorAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryDisabledValidatorAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerValidator.query.queryDisabledValidatorAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 
0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + const QueryRejectedDisableValidator = (owner: string, options: any) => { + const key = { type: 'QueryRejectedDisableValidator', owner }; + return useQuery([key], () => { + const { owner } = key + return client.ZigbeeallianceDistributedcomplianceledgerValidator.query.queryRejectedDisableValidator(owner).then( res => res.data ); + }, options); + } + + const QueryRejectedDisableValidatorAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryRejectedDisableValidatorAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return client.ZigbeeallianceDistributedcomplianceledgerValidator.query.queryRejectedDisableValidatorAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 
0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + return {QueryValidator,QueryValidatorAll,QueryLastValidatorPower,QueryLastValidatorPowerAll,QueryProposedDisableValidator,QueryProposedDisableValidatorAll,QueryDisabledValidator,QueryDisabledValidatorAll,QueryRejectedDisableValidator,QueryRejectedDisableValidatorAll, + } +} \ No newline at end of file diff --git a/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerVendorinfo/index.ts b/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerVendorinfo/index.ts new file mode 100755 index 000000000..a572c7b40 --- /dev/null +++ b/vue/src/composables/useZigbeeallianceDistributedcomplianceledgerVendorinfo/index.ts @@ -0,0 +1,34 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import { useQuery, type UseQueryOptions, useInfiniteQuery, type UseInfiniteQueryOptions } from "@tanstack/vue-query"; +import { useClient } from '../useClient'; +import type { Ref } from 'vue' + +export default function useZigbeeallianceDistributedcomplianceledgerVendorinfo() { + const client = useClient(); + const QueryVendorInfo = (vendorID: string, options: any) => { + const key = { type: 'QueryVendorInfo', vendorID }; + return useQuery([key], () => { + const { vendorID } = key + return client.ZigbeeallianceDistributedcomplianceledgerVendorinfo.query.queryVendorInfo(vendorID).then( res => res.data ); + }, options); + } + + const QueryVendorInfoAll = (query: any, options: any, perPage: number) => { + const key = { type: 'QueryVendorInfoAll', query }; + return useInfiniteQuery([key], ({pageParam = 1}: { pageParam?: number}) => { + const {query } = key + + query['pagination.limit']=perPage; + query['pagination.offset']= (pageParam-1)*perPage; + query['pagination.count_total']= true; + return 
client.ZigbeeallianceDistributedcomplianceledgerVendorinfo.query.queryVendorInfoAll(query ?? undefined).then( res => ({...res.data,pageParam}) ); + }, {...options, + getNextPageParam: (lastPage, allPages) => { if ((lastPage.pagination?.total ?? 0) >((lastPage.pageParam ?? 0) * perPage)) {return lastPage.pageParam+1 } else {return undefined}}, + getPreviousPageParam: (firstPage, allPages) => { if (firstPage.pageParam==1) { return undefined } else { return firstPage.pageParam-1}} + } + ); + } + + return {QueryVendorInfo,QueryVendorInfoAll, + } +} \ No newline at end of file diff --git a/vue/src/store/generated/cosmos/cosmos-sdk/cosmos.authz.v1beta1/module/types/tendermint/abci/types.ts b/vue/src/store/generated/cosmos/cosmos-sdk/cosmos.authz.v1beta1/module/types/tendermint/abci/types.ts index bd4813a20..6709f1808 100644 --- a/vue/src/store/generated/cosmos/cosmos-sdk/cosmos.authz.v1beta1/module/types/tendermint/abci/types.ts +++ b/vue/src/store/generated/cosmos/cosmos-sdk/cosmos.authz.v1beta1/module/types/tendermint/abci/types.ts @@ -515,7 +515,7 @@ export interface Evidence { /** * Total voting power of the validator set in case the ABCI application does * not store historical validators. - * https://github.com/tendermint/tendermint/issues/4581 + * https://github.com/cometbft/cometbft/issues/4581 */ totalVotingPower: number } diff --git a/x/common/types/uint16_range.pb.go b/x/common/types/uint16_range.pb.go index a9364123c..0a1c1128b 100644 --- a/x/common/types/uint16_range.pb.go +++ b/x/common/types/uint16_range.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: common/uint16_range.proto +// source: zigbeealliance/distributedcomplianceledger/common/uint16_range.proto package types import ( fmt "fmt" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -32,7 +32,7 @@ func (m *Uint16Range) Reset() { *m = Uint16Range{} } func (m *Uint16Range) String() string { return proto.CompactTextString(m) } func (*Uint16Range) ProtoMessage() {} func (*Uint16Range) Descriptor() ([]byte, []int) { - return fileDescriptor_476afb83b0d37b89, []int{0} + return fileDescriptor_42b7b0236903d486, []int{0} } func (m *Uint16Range) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -79,26 +79,28 @@ func init() { proto.RegisterType((*Uint16Range)(nil), "zigbeealliance.distributedcomplianceledger.common.Uint16Range") } -func init() { proto.RegisterFile("common/uint16_range.proto", fileDescriptor_476afb83b0d37b89) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/common/uint16_range.proto", fileDescriptor_42b7b0236903d486) +} -var fileDescriptor_476afb83b0d37b89 = []byte{ - // 246 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4c, 0xce, 0xcf, 0xcd, - 0xcd, 0xcf, 0xd3, 0x2f, 0xcd, 0xcc, 0x2b, 0x31, 0x34, 0x8b, 0x2f, 0x4a, 0xcc, 0x4b, 0x4f, 0xd5, - 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x32, 0xac, 0xca, 0x4c, 0x4f, 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, - 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4b, 0xc9, 0x2c, 0x2e, 0x29, 0xca, 0x4c, 0x2a, 0x2d, 0x49, - 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, 0xa7, 0x16, 0xe9, 0x41, 0x4c, - 0x91, 0x12, 0x49, 0xcf, 0x4f, 0xcf, 0x07, 0xeb, 0xd6, 0x07, 0xb1, 0x20, 0x06, 0x29, 0x15, 0x72, - 0x71, 0x87, 0x82, 0x8d, 0x0f, 0x02, 0x99, 0x2e, 0x64, 0xc0, 0xc5, 0x9c, 0x9b, 0x99, 0x27, 0xc1, - 0xa8, 0xc0, 0xa8, 0xc1, 0xea, 0x24, 0xf7, 0xe9, 0x9e, 
0xbc, 0x54, 0x59, 0x62, 0x4e, 0x66, 0x4a, - 0x62, 0x49, 0xaa, 0x95, 0x52, 0x7a, 0x49, 0xaa, 0xad, 0x81, 0x4e, 0x4e, 0x49, 0xaa, 0xad, 0x99, - 0xa9, 0xa9, 0xb1, 0xa9, 0x52, 0x10, 0x48, 0x29, 0x58, 0x47, 0x62, 0x85, 0x04, 0x13, 0x91, 0x3a, - 0x12, 0x2b, 0x9c, 0x12, 0x4f, 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, 0x8e, 0xf1, 0xc1, 0x23, 0x39, - 0xc6, 0x09, 0x8f, 0xe5, 0x18, 0x2e, 0x3c, 0x96, 0x63, 0xb8, 0xf1, 0x58, 0x8e, 0x21, 0xca, 0x3d, - 0x3d, 0xb3, 0x24, 0xa3, 0x34, 0x09, 0xe4, 0x62, 0x7d, 0x88, 0x07, 0x75, 0x61, 0x3e, 0xd4, 0x47, - 0xf2, 0xa1, 0x2e, 0xc2, 0x8b, 0xba, 0x10, 0x3f, 0xea, 0x57, 0xe8, 0x43, 0xc3, 0xaa, 0xa4, 0xb2, - 0x20, 0xb5, 0x38, 0x89, 0x0d, 0xec, 0x39, 0x63, 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, 0xa7, 0xe1, - 0x11, 0x4e, 0x42, 0x01, 0x00, 0x00, +var fileDescriptor_42b7b0236903d486 = []byte{ + // 247 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x72, 0xa9, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0x27, 0xe7, 0xe7, 0xe6, 0xe6, 0xe7, 0xe9, 0x97, 0x66, 0xe6, 0x95, 0x18, 0x9a, + 0xc5, 0x17, 0x25, 0xe6, 0xa5, 0xa7, 0xea, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0x19, 0xa2, 0x9a, + 0xa2, 0x87, 0xc7, 0x14, 0x3d, 0x88, 0x29, 0x52, 0x22, 0xe9, 0xf9, 0xe9, 0xf9, 0x60, 0xdd, 0xfa, + 0x20, 0x16, 0xc4, 0x20, 0xa5, 0x42, 0x2e, 0xee, 0x50, 0xb0, 0xf1, 0x41, 0x20, 0xd3, 0x85, 0x0c, + 0xb8, 0x98, 0x73, 0x33, 0xf3, 0x24, 0x18, 0x15, 0x18, 0x35, 0x58, 0x9d, 0xe4, 0x3e, 0xdd, 0x93, + 0x97, 0x2a, 0x4b, 0xcc, 0xc9, 0x4c, 0x49, 0x2c, 0x49, 0xb5, 0x52, 0x4a, 0x2f, 0x49, 0xb5, 0x35, + 0xd0, 0xc9, 0x29, 0x49, 0xb5, 0x35, 0x33, 0x35, 0x35, 0x36, 0x55, 0x0a, 0x02, 0x29, 0x05, 0xeb, + 0x48, 0xac, 0x90, 0x60, 0x22, 0x52, 0x47, 0x62, 0x85, 0x53, 0xe2, 0x89, 0x47, 0x72, 0x8c, 0x17, + 0x1e, 0xc9, 0x31, 0x3e, 0x78, 0x24, 0xc7, 0x38, 0xe1, 
0xb1, 0x1c, 0xc3, 0x85, 0xc7, 0x72, 0x0c, + 0x37, 0x1e, 0xcb, 0x31, 0x44, 0xb9, 0xa7, 0x67, 0x96, 0x64, 0x94, 0x26, 0x81, 0x5c, 0xac, 0x0f, + 0xf1, 0xa0, 0x2e, 0xb6, 0x70, 0xd2, 0x45, 0x78, 0x51, 0x17, 0x1a, 0x52, 0x15, 0xb0, 0xb0, 0x2a, + 0xa9, 0x2c, 0x48, 0x2d, 0x4e, 0x62, 0x03, 0x7b, 0xce, 0x18, 0x10, 0x00, 0x00, 0xff, 0xff, 0xeb, + 0x4a, 0x43, 0x0e, 0x6d, 0x01, 0x00, 0x00, } func (m *Uint16Range) Marshal() (dAtA []byte, err error) { diff --git a/x/compliance/client/cli/query.go b/x/compliance/client/cli/query.go index bba98485d..333fe4732 100644 --- a/x/compliance/client/cli/query.go +++ b/x/compliance/client/cli/query.go @@ -12,7 +12,7 @@ import ( ) // GetQueryCmd returns the cli query commands for this module. -func GetQueryCmd(queryRoute string) *cobra.Command { +func GetQueryCmd(_ string) *cobra.Command { // Group compliance queries under a subcommand cmd := &cobra.Command{ Use: types.ModuleName, diff --git a/x/compliance/client/cli/query_certified_model.go b/x/compliance/client/cli/query_certified_model.go index 856ef66dc..5011f44b1 100644 --- a/x/compliance/client/cli/query_certified_model.go +++ b/x/compliance/client/cli/query_certified_model.go @@ -15,7 +15,10 @@ func CmdListCertifiedModel() *cobra.Command { Use: "all-certified-models", Short: "Query the list of all certified models", RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } pageReq, err := client.ReadPageRequest(cmd.Flags()) if err != nil { @@ -60,7 +63,11 @@ func CmdShowCertifiedModel() *cobra.Command { "`certification_type`) is compliant", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } + var res types.CertifiedModel return cli.QueryWithProof( diff --git 
a/x/compliance/client/cli/query_certified_model_test.go b/x/compliance/client/cli/query_certified_model_test.go index ea56dde6e..5487ab956 100644 --- a/x/compliance/client/cli/query_certified_model_test.go +++ b/x/compliance/client/cli/query_certified_model_test.go @@ -9,7 +9,7 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/nullify" "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/client/cli" diff --git a/x/compliance/client/cli/query_compliance_info.go b/x/compliance/client/cli/query_compliance_info.go index 5d202547b..2592f6239 100644 --- a/x/compliance/client/cli/query_compliance_info.go +++ b/x/compliance/client/cli/query_compliance_info.go @@ -6,7 +6,6 @@ import ( "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/client/flags" "github.com/spf13/cobra" - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" ) @@ -16,7 +15,10 @@ func CmdListComplianceInfo() *cobra.Command { Use: "all-compliance-info", Short: "Query the list of all compliance info records", RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } pageReq, err := client.ReadPageRequest(cmd.Flags()) if err != nil { @@ -60,8 +62,12 @@ func CmdShowComplianceInfo() *cobra.Command { Short: "Query compliance info for Model (identified by the `vid`, `pid`, 'softwareVersion' and `certification_type`)", Args: 
cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) - var res dclcompltypes.ComplianceInfo + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } + + var res types.ComplianceInfo return cli.QueryWithProof( clientCtx, diff --git a/x/compliance/client/cli/query_compliance_info_test.go b/x/compliance/client/cli/query_compliance_info_test.go index 85f7408b8..74ee6559a 100644 --- a/x/compliance/client/cli/query_compliance_info_test.go +++ b/x/compliance/client/cli/query_compliance_info_test.go @@ -9,7 +9,7 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/nullify" "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/client/cli" diff --git a/x/compliance/client/cli/query_device_software_compliance.go b/x/compliance/client/cli/query_device_software_compliance.go index cf7fb5b84..e33814d54 100644 --- a/x/compliance/client/cli/query_device_software_compliance.go +++ b/x/compliance/client/cli/query_device_software_compliance.go @@ -16,7 +16,10 @@ func CmdListDeviceSoftwareCompliance() *cobra.Command { Use: "all-device-software-compliance", Short: "Query the list of all device software compliances", RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } pageReq, err := client.ReadPageRequest(cmd.Flags()) if err != nil { @@ -50,7 +53,10 @@ func CmdShowDeviceSoftwareCompliance() *cobra.Command { Short: "Query device software compliance for Model (identified by the 
`cdCertificateId`)", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } argCDCertificateID := viper.GetString(FlagCDCertificateID) diff --git a/x/compliance/client/cli/query_device_software_compliance_test.go b/x/compliance/client/cli/query_device_software_compliance_test.go index 668d23f64..a1c5719eb 100644 --- a/x/compliance/client/cli/query_device_software_compliance_test.go +++ b/x/compliance/client/cli/query_device_software_compliance_test.go @@ -9,7 +9,7 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" diff --git a/x/compliance/client/cli/query_provisional_model.go b/x/compliance/client/cli/query_provisional_model.go index 5f121f4b8..b4e37ffa7 100644 --- a/x/compliance/client/cli/query_provisional_model.go +++ b/x/compliance/client/cli/query_provisional_model.go @@ -15,7 +15,10 @@ func CmdListProvisionalModel() *cobra.Command { Use: "all-provisional-models", Short: "Query the list of all provisional models", RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } pageReq, err := client.ReadPageRequest(cmd.Flags()) if err != nil { @@ -59,7 +62,11 @@ func CmdShowProvisionalModel() *cobra.Command { Short: "Gets a boolean if the given Model (identified by the `vid`, `pid`, `softwareVersion` and `certification_type`) is in provisional state", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := 
client.GetClientQueryContext(cmd) + if err != nil { + return err + } + var res types.ProvisionalModel return cli.QueryWithProof( diff --git a/x/compliance/client/cli/query_provisional_model_test.go b/x/compliance/client/cli/query_provisional_model_test.go index 601aa10bc..a8d57769f 100644 --- a/x/compliance/client/cli/query_provisional_model_test.go +++ b/x/compliance/client/cli/query_provisional_model_test.go @@ -9,7 +9,7 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/nullify" "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/client/cli" diff --git a/x/compliance/client/cli/query_revoked_model.go b/x/compliance/client/cli/query_revoked_model.go index aeb2a82bf..21d4e1b66 100644 --- a/x/compliance/client/cli/query_revoked_model.go +++ b/x/compliance/client/cli/query_revoked_model.go @@ -15,7 +15,10 @@ func CmdListRevokedModel() *cobra.Command { Use: "all-revoked-models", Short: "Query the list of all revoked models", RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } pageReq, err := client.ReadPageRequest(cmd.Flags()) if err != nil { @@ -59,7 +62,11 @@ func CmdShowRevokedModel() *cobra.Command { Short: "Gets a boolean if the given Model (identified by the `vid`, `pid`, `softwareVersion` and `certification_type`) is revoked", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } + var res 
types.RevokedModel return cli.QueryWithProof( diff --git a/x/compliance/client/cli/query_revoked_model_test.go b/x/compliance/client/cli/query_revoked_model_test.go index f2543be74..240dcd646 100644 --- a/x/compliance/client/cli/query_revoked_model_test.go +++ b/x/compliance/client/cli/query_revoked_model_test.go @@ -9,7 +9,7 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/nullify" "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/client/cli" diff --git a/x/compliance/genesis_test.go b/x/compliance/genesis_test.go index 6ffbec599..c58480708 100644 --- a/x/compliance/genesis_test.go +++ b/x/compliance/genesis_test.go @@ -5,14 +5,13 @@ import ( "github.com/stretchr/testify/require" keepertest "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/keeper" - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance" "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" ) func TestGenesis(t *testing.T) { genesisState := types.GenesisState{ - ComplianceInfoList: []dclcompltypes.ComplianceInfo{ + ComplianceInfoList: []types.ComplianceInfo{ { Vid: 0, Pid: 0, diff --git a/x/compliance/handler.go b/x/compliance/handler.go index 55261aecf..00fd5c4c0 100644 --- a/x/compliance/handler.go +++ b/x/compliance/handler.go @@ -3,8 +3,10 @@ package compliance import ( "fmt" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/keeper" 
"github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" ) @@ -41,7 +43,7 @@ func NewHandler(k keeper.Keeper) sdk.Handler { default: errMsg := fmt.Sprintf("unrecognized %s message type: %T", types.ModuleName, msg) - return nil, sdkerrors.Wrap(sdkerrors.ErrUnknownRequest, errMsg) + return nil, errors.Wrap(sdkerrors.ErrUnknownRequest, errMsg) } } } diff --git a/x/compliance/handler_certify_model_test.go b/x/compliance/handler_certify_model_test.go index 477207afe..cfe69d97f 100644 --- a/x/compliance/handler_certify_model_test.go +++ b/x/compliance/handler_certify_model_test.go @@ -8,7 +8,6 @@ import ( sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" dclauthtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" modeltypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/model/types" @@ -20,7 +19,7 @@ func setupCertifyModel(t *testing.T) (*TestSetup, int32, int32, uint32, string, vid, pid, softwareVersion, softwareVersionString := setup.addModelVersion( testconstants.Vid, testconstants.Pid, testconstants.SoftwareVersion, testconstants.SoftwareVersionString) - certificationType := dclcompltypes.ZigbeeCertificationType + certificationType := types.ZigbeeCertificationType return setup, vid, pid, softwareVersion, softwareVersionString, certificationType } @@ -43,7 +42,7 @@ func (setup *TestSetup) checkModelCertified(t *testing.T, certifyModelMsg *types require.False(t, provisionalModel.Value) } -func (setup *TestSetup) checkCertifiedModelMatchesMsg(t *testing.T, certifyModelMsg *types.MsgCertifyModel) *dclcompltypes.ComplianceInfo { +func (setup *TestSetup) checkCertifiedModelMatchesMsg(t *testing.T, 
certifyModelMsg *types.MsgCertifyModel) *types.ComplianceInfo { t.Helper() vid := certifyModelMsg.Vid @@ -57,7 +56,8 @@ func (setup *TestSetup) checkCertifiedModelMatchesMsg(t *testing.T, certifyModel return receivedComplianceInfo } -func (setup *TestSetup) checkDeviceSoftwareComplianceMatchesComplianceInfo(t *testing.T, complianceInfo *dclcompltypes.ComplianceInfo) { +func (setup *TestSetup) checkDeviceSoftwareComplianceMatchesComplianceInfo(t *testing.T, complianceInfo *types.ComplianceInfo) { + t.Helper() receivedDeviceSoftwareCompliance, _ := queryDeviceSoftwareCompliance(setup, complianceInfo.CDCertificateId) require.Equal(t, receivedDeviceSoftwareCompliance.CDCertificateId, complianceInfo.CDCertificateId) checkDeviceSoftwareCompliance(t, receivedDeviceSoftwareCompliance.ComplianceInfo[0], complianceInfo) @@ -89,7 +89,7 @@ func TestHandler_CertifyModel_Zigbee_WithAllOptionalFlags(t *testing.T) { func TestHandler_CertifyModel_Matter(t *testing.T) { setup, vid, pid, softwareVersion, softwareVersionString, _ := setupCertifyModel(t) - certificationType := dclcompltypes.MatterCertificationType + certificationType := types.MatterCertificationType certifyModelMsg := newMsgCertifyModel(vid, pid, softwareVersion, softwareVersionString, certificationType, setup.CertificationCenter) _, certifyModelErr := setup.Handler(setup.Ctx, certifyModelMsg) @@ -102,7 +102,7 @@ func TestHandler_CertifyModel_Matter(t *testing.T) { func TestHandler_CertifyModel_Matter_WithAllOptionalFlags(t *testing.T) { setup, vid, pid, softwareVersion, softwareVersionString, _ := setupCertifyModel(t) - certificationType := dclcompltypes.MatterCertificationType + certificationType := types.MatterCertificationType certifyModelMsg := newMsgCertifyModelWithAllOptionalFlags(vid, pid, softwareVersion, softwareVersionString, certificationType, setup.CertificationCenter) _, certifyModelErr := setup.Handler(setup.Ctx, certifyModelMsg) @@ -127,7 +127,7 @@ func TestHandler_CertifyProvisionedModel(t *testing.T) 
{ setup.checkModelCertified(t, certifyModelMsg) setup.checkDeviceSoftwareComplianceMatchesComplianceInfo(t, complianceInfo) require.Equal(t, 1, len(complianceInfo.History)) - require.Equal(t, dclcompltypes.CodeProvisional, complianceInfo.History[0].SoftwareVersionCertificationStatus) + require.Equal(t, types.CodeProvisional, complianceInfo.History[0].SoftwareVersionCertificationStatus) require.Equal(t, provisionModelMsg.ProvisionalDate, complianceInfo.History[0].Date) } @@ -145,7 +145,7 @@ func TestHandler_CertifyProvisionedModelWithAllOptionalFlags(t *testing.T) { setup.checkModelCertified(t, certifyModelMsg) setup.checkDeviceSoftwareComplianceMatchesComplianceInfo(t, complianceInfo) require.Equal(t, 1, len(complianceInfo.History)) - require.Equal(t, dclcompltypes.CodeProvisional, complianceInfo.History[0].SoftwareVersionCertificationStatus) + require.Equal(t, types.CodeProvisional, complianceInfo.History[0].SoftwareVersionCertificationStatus) require.Equal(t, provisionModelMsg.ProvisionalDate, complianceInfo.History[0].Date) } @@ -261,7 +261,7 @@ func TestHandler_CertifyRevokedModel(t *testing.T) { receivedComplianceInfo := setup.checkCertifiedModelMatchesMsg(t, certifyModelMsg) require.Equal(t, 1, len(receivedComplianceInfo.History)) - require.Equal(t, dclcompltypes.CodeRevoked, receivedComplianceInfo.History[0].SoftwareVersionCertificationStatus) + require.Equal(t, types.CodeRevoked, receivedComplianceInfo.History[0].SoftwareVersionCertificationStatus) require.Equal(t, revokeModelMsg.RevocationDate, receivedComplianceInfo.History[0].Date) receivedDeviceSoftwareCompliance, _ := queryDeviceSoftwareCompliance(setup, testconstants.CDCertificateID) diff --git a/x/compliance/handler_delete_compliance_info_test.go b/x/compliance/handler_delete_compliance_info_test.go index ecb7b4b0f..049894fce 100644 --- a/x/compliance/handler_delete_compliance_info_test.go +++ b/x/compliance/handler_delete_compliance_info_test.go @@ -6,16 +6,16 @@ import ( sdk 
"github.com/cosmos/cosmos-sdk/types" "github.com/stretchr/testify/require" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" ) func setupDeleteComplianceInfo(t *testing.T) (*TestSetup, int32, int32, uint32, string, string) { + t.Helper() setup := setup(t) vid, pid, softwareVersion, softwareVersionString := setup.addModelVersion( testconstants.Vid, testconstants.Pid, testconstants.SoftwareVersion, testconstants.SoftwareVersionString) - certificationType := dclcompltypes.ZigbeeCertificationType + certificationType := types.ZigbeeCertificationType return setup, vid, pid, softwareVersion, softwareVersionString, certificationType } @@ -29,6 +29,7 @@ func (setup *TestSetup) deleteComplianceInfo(vid int32, pid int32, softwareVersi } func (setup *TestSetup) checkComplianceInfoDeleted(t *testing.T, deleteComplInfoMsg *types.MsgDeleteComplianceInfo) { + t.Helper() vid := deleteComplInfoMsg.Vid pid := deleteComplInfoMsg.Pid softwareVersion := deleteComplInfoMsg.SoftwareVersion diff --git a/x/compliance/handler_provision_model_test.go b/x/compliance/handler_provision_model_test.go index 363d23ff8..663366092 100644 --- a/x/compliance/handler_provision_model_test.go +++ b/x/compliance/handler_provision_model_test.go @@ -8,17 +8,17 @@ import ( sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" dclauthtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" ) func setupProvisionModel(t *testing.T) (*TestSetup, int32, int32, uint32, 
string, string) { + t.Helper() setup := setup(t) vid, pid, softwareVersion, softwareVersionString := setup.addModelVersion( testconstants.Vid, testconstants.Pid, testconstants.SoftwareVersion, testconstants.SoftwareVersionString) - certificationType := dclcompltypes.ZigbeeCertificationType + certificationType := types.ZigbeeCertificationType return setup, vid, pid, softwareVersion, softwareVersionString, certificationType } @@ -49,6 +49,7 @@ func (setup *TestSetup) provisionModelWithAllOptionalFlags(vid int32, pid int32, } func (setup *TestSetup) checkComplianceInfoEqualsProvisionModelMsgData(t *testing.T, provisionModelMsg *types.MsgProvisionModel) { + t.Helper() vid := provisionModelMsg.Vid pid := provisionModelMsg.Pid softwareVersion := provisionModelMsg.SoftwareVersion @@ -60,6 +61,7 @@ func (setup *TestSetup) checkComplianceInfoEqualsProvisionModelMsgData(t *testin } func (setup *TestSetup) checkModelProvisioned(t *testing.T, provisionModelMsg *types.MsgProvisionModel) { + t.Helper() vid := provisionModelMsg.Vid pid := provisionModelMsg.Pid softwareVersion := provisionModelMsg.SoftwareVersion @@ -153,7 +155,7 @@ func TestHandler_ProvisionModel_AlreadyRevoked(t *testing.T) { func TestHandler_ProvisionModel_MoreThanOneModel(t *testing.T) { setup := setup(t) - certificationType := dclcompltypes.ZigbeeCertificationType + certificationType := types.ZigbeeCertificationType modelVersionsQuantity := 5 for i := 1; i < modelVersionsQuantity; i++ { diff --git a/x/compliance/handler_revoke_model_test.go b/x/compliance/handler_revoke_model_test.go index ae7bd24b4..c8a99e2d4 100644 --- a/x/compliance/handler_revoke_model_test.go +++ b/x/compliance/handler_revoke_model_test.go @@ -9,18 +9,18 @@ import ( sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" - dclcompltypes 
"github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" dclauthtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" modeltypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/model/types" ) func setupRevokeModel(t *testing.T) (*TestSetup, int32, int32, uint32, string, string) { + t.Helper() setup := setup(t) vid, pid, softwareVersion, softwareVersionString := setup.addModelVersion( testconstants.Vid, testconstants.Pid, testconstants.SoftwareVersion, testconstants.SoftwareVersionString) - certificationType := dclcompltypes.ZigbeeCertificationType + certificationType := types.ZigbeeCertificationType return setup, vid, pid, softwareVersion, softwareVersionString, certificationType } @@ -33,6 +33,7 @@ func (setup *TestSetup) revokeModel(vid int32, pid int32, softwareVersion uint32 } func (setup *TestSetup) checkModelRevoked(t *testing.T, revokeModelMsg *types.MsgRevokeModel) { + t.Helper() vid := revokeModelMsg.Vid pid := revokeModelMsg.Pid softwareVersion := revokeModelMsg.SoftwareVersion @@ -49,6 +50,7 @@ func (setup *TestSetup) checkModelRevoked(t *testing.T, revokeModelMsg *types.Ms } func (setup *TestSetup) checkModelStatusChangedToRevoked(t *testing.T, revokeModelMsg *types.MsgRevokeModel) { + t.Helper() vid := revokeModelMsg.Vid pid := revokeModelMsg.Pid softwareVersion := revokeModelMsg.SoftwareVersion @@ -64,7 +66,8 @@ func (setup *TestSetup) checkModelStatusChangedToRevoked(t *testing.T, revokeMod require.False(t, certifiedModel.Value) } -func (setup *TestSetup) checkRevokedModelInfoEqualsMessageData(t *testing.T, revokeModelMsg *types.MsgRevokeModel) *dclcompltypes.ComplianceInfo { +func (setup *TestSetup) checkRevokedModelInfoEqualsMessageData(t *testing.T, revokeModelMsg *types.MsgRevokeModel) *types.ComplianceInfo { + t.Helper() vid := revokeModelMsg.Vid pid := revokeModelMsg.Pid softwareVersion := 
revokeModelMsg.SoftwareVersion @@ -101,7 +104,7 @@ func TestHandler_RevokeCertifiedModel(t *testing.T) { complianceInfo := setup.checkRevokedModelInfoEqualsMessageData(t, revokeModelMsg) require.Equal(t, 1, len(complianceInfo.History)) - require.Equal(t, dclcompltypes.CodeCertified, complianceInfo.History[0].SoftwareVersionCertificationStatus) + require.Equal(t, types.CodeCertified, complianceInfo.History[0].SoftwareVersionCertificationStatus) require.Equal(t, certifyModelMsg.CertificationDate, complianceInfo.History[0].Date) setup.checkModelStatusChangedToRevoked(t, revokeModelMsg) @@ -118,7 +121,7 @@ func TestHandler_RevokeProvisionedModel(t *testing.T) { complianceInfo := setup.checkRevokedModelInfoEqualsMessageData(t, revokeModelMsg) require.Equal(t, 1, len(complianceInfo.History)) - require.Equal(t, dclcompltypes.CodeProvisional, complianceInfo.History[0].SoftwareVersionCertificationStatus) + require.Equal(t, types.CodeProvisional, complianceInfo.History[0].SoftwareVersionCertificationStatus) require.Equal(t, provisionModelMsg.ProvisionalDate, complianceInfo.History[0].Date) setup.checkModelStatusChangedToRevoked(t, revokeModelMsg) @@ -174,7 +177,7 @@ func TestHandler_RevokeModelTwice(t *testing.T) { func TestHandler_RevokeDifferentModels(t *testing.T) { setup := setup(t) - certificationType := dclcompltypes.ZigbeeCertificationType + certificationType := types.ZigbeeCertificationType modelVersionsQuantity := 5 for i := 1; i < modelVersionsQuantity; i++ { @@ -271,8 +274,8 @@ func TestHandler_CertifyRevokedModelThatWasCertifiedEarlier(t *testing.T) { setup.checkModelCertified(t, secondCertifyModelMsg) require.Equal(t, 2, len(complianceInfo.History)) - require.Equal(t, dclcompltypes.CodeCertified, complianceInfo.History[0].SoftwareVersionCertificationStatus) + require.Equal(t, types.CodeCertified, complianceInfo.History[0].SoftwareVersionCertificationStatus) require.Equal(t, certifyModelMsg.CertificationDate, complianceInfo.History[0].Date) - require.Equal(t, 
dclcompltypes.CodeRevoked, complianceInfo.History[1].SoftwareVersionCertificationStatus) + require.Equal(t, types.CodeRevoked, complianceInfo.History[1].SoftwareVersionCertificationStatus) require.Equal(t, revokeModelMsg.RevocationDate, complianceInfo.History[1].Date) } diff --git a/x/compliance/handler_test.go b/x/compliance/handler_test.go index 13ee37ae7..53eae5bd5 100644 --- a/x/compliance/handler_test.go +++ b/x/compliance/handler_test.go @@ -11,7 +11,6 @@ import ( "github.com/stretchr/testify/require" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" testkeeper "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/keeper" - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" dclauthtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" @@ -64,7 +63,7 @@ type TestSetup struct { Handler sdk.Handler // Querier sdk.Querier CertificationCenter sdk.AccAddress - CertificationTypes dclcompltypes.CertificationTypes + CertificationTypes types.CertificationTypes } func (setup *TestSetup) addAccount( @@ -112,7 +111,7 @@ func setup(t *testing.T) *TestSetup { certificationCenter := generateAccAddress() - certificationTypes := dclcompltypes.CertificationTypes{dclcompltypes.ZigbeeCertificationType, dclcompltypes.MatterCertificationType} + certificationTypes := types.CertificationTypes{types.ZigbeeCertificationType, types.MatterCertificationType} setup := &TestSetup{ T: t, @@ -137,7 +136,7 @@ func queryComplianceInfo( pid int32, softwareVersion uint32, certificationType string, -) (*dclcompltypes.ComplianceInfo, error) { +) (*types.ComplianceInfo, error) { req := &types.QueryGetComplianceInfoRequest{ Vid: vid, Pid: pid, @@ -163,7 +162,7 @@ func queryExistingComplianceInfo( pid int32, 
softwareVersion uint32, certificationType string, -) *dclcompltypes.ComplianceInfo { +) *types.ComplianceInfo { complianceInfo, err := queryComplianceInfo(setup, vid, pid, softwareVersion, certificationType) require.NoError(setup.T, err) @@ -281,12 +280,12 @@ func queryRevokedModel( func checkProvisionalModelInfo( t *testing.T, provisionalModelMsg *types.MsgProvisionModel, - receivedComplianceInfo *dclcompltypes.ComplianceInfo, + receivedComplianceInfo *types.ComplianceInfo, ) { t.Helper() require.Equal(t, provisionalModelMsg.Vid, receivedComplianceInfo.Vid) require.Equal(t, provisionalModelMsg.Pid, receivedComplianceInfo.Pid) - require.Equal(t, dclcompltypes.CodeProvisional, receivedComplianceInfo.SoftwareVersionCertificationStatus) + require.Equal(t, types.CodeProvisional, receivedComplianceInfo.SoftwareVersionCertificationStatus) require.Equal(t, provisionalModelMsg.ProvisionalDate, receivedComplianceInfo.Date) require.Equal(t, provisionalModelMsg.Reason, receivedComplianceInfo.Reason) require.Equal(t, provisionalModelMsg.CertificationType, receivedComplianceInfo.CertificationType) @@ -307,12 +306,12 @@ func checkProvisionalModelInfo( func checkCertifiedModelInfo( t *testing.T, certifyModelMsg *types.MsgCertifyModel, - receivedComplianceInfo *dclcompltypes.ComplianceInfo, + receivedComplianceInfo *types.ComplianceInfo, ) { t.Helper() require.Equal(t, certifyModelMsg.Vid, receivedComplianceInfo.Vid) require.Equal(t, certifyModelMsg.Pid, receivedComplianceInfo.Pid) - require.Equal(t, dclcompltypes.CodeCertified, receivedComplianceInfo.SoftwareVersionCertificationStatus) + require.Equal(t, types.CodeCertified, receivedComplianceInfo.SoftwareVersionCertificationStatus) require.Equal(t, certifyModelMsg.CertificationDate, receivedComplianceInfo.Date) require.Equal(t, certifyModelMsg.Reason, receivedComplianceInfo.Reason) require.Equal(t, certifyModelMsg.CertificationType, receivedComplianceInfo.CertificationType) @@ -332,8 +331,8 @@ func checkCertifiedModelInfo( func 
checkDeviceSoftwareCompliance( t *testing.T, - info *dclcompltypes.ComplianceInfo, - receivedComplianceInfo *dclcompltypes.ComplianceInfo, + info *types.ComplianceInfo, + receivedComplianceInfo *types.ComplianceInfo, ) { t.Helper() require.Equal(t, info.Vid, receivedComplianceInfo.Vid) @@ -360,18 +359,19 @@ func checkDeviceSoftwareCompliance( func checkRevokedModelInfo( t *testing.T, revokeModelMsg *types.MsgRevokeModel, - receivedComplianceInfo *dclcompltypes.ComplianceInfo, + receivedComplianceInfo *types.ComplianceInfo, ) { t.Helper() require.Equal(t, revokeModelMsg.Vid, receivedComplianceInfo.Vid) require.Equal(t, revokeModelMsg.Pid, receivedComplianceInfo.Pid) - require.Equal(t, dclcompltypes.CodeRevoked, receivedComplianceInfo.SoftwareVersionCertificationStatus) + require.Equal(t, types.CodeRevoked, receivedComplianceInfo.SoftwareVersionCertificationStatus) require.Equal(t, revokeModelMsg.RevocationDate, receivedComplianceInfo.Date) require.Equal(t, revokeModelMsg.Reason, receivedComplianceInfo.Reason) require.Equal(t, revokeModelMsg.CertificationType, receivedComplianceInfo.CertificationType) } func assertNotFound(t *testing.T, err error) { + t.Helper() require.Error(t, err) require.Equal(t, codes.NotFound, status.Code(err)) } diff --git a/x/compliance/handler_update_compliance_info_test.go b/x/compliance/handler_update_compliance_info_test.go index e9dc107fa..ca5b308bc 100644 --- a/x/compliance/handler_update_compliance_info_test.go +++ b/x/compliance/handler_update_compliance_info_test.go @@ -7,12 +7,12 @@ import ( "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" dclauthtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" ) -func (setup 
*TestSetup) checkAllComplianceInfoFieldsUpdated(t *testing.T, originalComplianceInfo *dclcompltypes.ComplianceInfo, updatedComplianceInfo *dclcompltypes.ComplianceInfo) { +func (setup *TestSetup) checkAllComplianceInfoFieldsUpdated(t *testing.T, originalComplianceInfo *types.ComplianceInfo, updatedComplianceInfo *types.ComplianceInfo) { + t.Helper() require.Equal(t, originalComplianceInfo.Vid, updatedComplianceInfo.Vid) require.Equal(t, originalComplianceInfo.Pid, updatedComplianceInfo.Pid) require.Equal(t, originalComplianceInfo.SoftwareVersion, updatedComplianceInfo.SoftwareVersion) @@ -34,7 +34,8 @@ func (setup *TestSetup) checkAllComplianceInfoFieldsUpdated(t *testing.T, origin require.Equal(t, originalComplianceInfo.SchemaVersion, updatedComplianceInfo.SchemaVersion) } -func (setup *TestSetup) checkDeviceSoftwareComplianceUpdated(t *testing.T, originalComplianceInfo *dclcompltypes.ComplianceInfo, updatedDeviceSoftwareCompliance *types.DeviceSoftwareCompliance, isUpdatedMinimally bool) { +func (setup *TestSetup) checkDeviceSoftwareComplianceUpdated(t *testing.T, originalComplianceInfo *types.ComplianceInfo, updatedDeviceSoftwareCompliance *types.DeviceSoftwareCompliance, isUpdatedMinimally bool) { + t.Helper() isFound := false for _, complianceInfo := range updatedDeviceSoftwareCompliance.ComplianceInfo { @@ -58,12 +59,13 @@ func (setup *TestSetup) checkDeviceSoftwareComplianceUpdated(t *testing.T, origi require.True(t, isFound) } -func setupUpdateComplianceInfo(t *testing.T) (*TestSetup, int32, int32, uint32, string, string, *dclcompltypes.ComplianceInfo, *types.DeviceSoftwareCompliance) { +func setupUpdateComplianceInfo(t *testing.T) (*TestSetup, int32, int32, uint32, string, string, *types.ComplianceInfo, *types.DeviceSoftwareCompliance) { + t.Helper() setup := setup(t) vid, pid, softwareVersion, softwareVersionString := setup.addModelVersion( testconstants.Vid, testconstants.Pid, testconstants.SoftwareVersion, testconstants.SoftwareVersionString) - 
certificationType := dclcompltypes.ZigbeeCertificationType + certificationType := types.ZigbeeCertificationType certifyModelMsg := newMsgCertifyModel(vid, pid, softwareVersion, softwareVersionString, certificationType, setup.CertificationCenter) _, certifyModelErr := setup.Handler(setup.Ctx, certifyModelMsg) @@ -148,7 +150,7 @@ func TestHandler_UpdateComplianceInfo_NotByCertificationCenter(t *testing.T) { _, updateComplianceInfoErr := setup.Handler(setup.Ctx, updateComplianceInfoMsg) require.ErrorIs(t, updateComplianceInfoErr, sdkerrors.ErrUnauthorized) - updatedComplianceInfo := queryExistingComplianceInfo(setup, vid, pid, softwareVersion, dclcompltypes.ZigbeeCertificationType) + updatedComplianceInfo := queryExistingComplianceInfo(setup, vid, pid, softwareVersion, types.ZigbeeCertificationType) require.Equal(t, originalComplianceInfo, updatedComplianceInfo) } diff --git a/x/compliance/keeper/compliance_info.go b/x/compliance/keeper/compliance_info.go index f8a6b0ee1..c1f7ac36e 100644 --- a/x/compliance/keeper/compliance_info.go +++ b/x/compliance/keeper/compliance_info.go @@ -3,12 +3,12 @@ package keeper import ( "github.com/cosmos/cosmos-sdk/store/prefix" sdk "github.com/cosmos/cosmos-sdk/types" - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" + "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" ) // SetComplianceInfo set a specific complianceInfo in the store from its index. 
-func (k Keeper) SetComplianceInfo(ctx sdk.Context, complianceInfo dclcompltypes.ComplianceInfo) { +func (k Keeper) SetComplianceInfo(ctx sdk.Context, complianceInfo types.ComplianceInfo) { store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.ComplianceInfoKeyPrefix)) b := k.cdc.MustMarshal(&complianceInfo) store.Set(types.ComplianceInfoKey( @@ -26,7 +26,7 @@ func (k Keeper) GetComplianceInfo( pid int32, softwareVersion uint32, certificationType string, -) (val dclcompltypes.ComplianceInfo, found bool) { +) (val types.ComplianceInfo, found bool) { store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.ComplianceInfoKeyPrefix)) b := store.Get(types.ComplianceInfoKey( @@ -62,14 +62,14 @@ func (k Keeper) RemoveComplianceInfo( } // GetAllComplianceInfo returns all complianceInfo. -func (k Keeper) GetAllComplianceInfo(ctx sdk.Context) (list []dclcompltypes.ComplianceInfo) { +func (k Keeper) GetAllComplianceInfo(ctx sdk.Context) (list []types.ComplianceInfo) { store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.ComplianceInfoKeyPrefix)) iterator := sdk.KVStorePrefixIterator(store, []byte{}) defer iterator.Close() for ; iterator.Valid(); iterator.Next() { - var val dclcompltypes.ComplianceInfo + var val types.ComplianceInfo k.cdc.MustUnmarshal(iterator.Value(), &val) list = append(list, val) } diff --git a/x/compliance/keeper/compliance_info_test.go b/x/compliance/keeper/compliance_info_test.go index 0f68cc32e..9039d62c0 100644 --- a/x/compliance/keeper/compliance_info_test.go +++ b/x/compliance/keeper/compliance_info_test.go @@ -8,8 +8,8 @@ import ( "github.com/stretchr/testify/require" keepertest "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/nullify" - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/keeper" + 
dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" ) // Prevent strconv unused error. diff --git a/x/compliance/keeper/grpc_query_compliance_info.go b/x/compliance/keeper/grpc_query_compliance_info.go index 36284e817..50d252fbf 100644 --- a/x/compliance/keeper/grpc_query_compliance_info.go +++ b/x/compliance/keeper/grpc_query_compliance_info.go @@ -6,12 +6,10 @@ import ( "github.com/cosmos/cosmos-sdk/store/prefix" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/types/query" - "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" - - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" "google.golang.org/grpc/codes" - "google.golang.org/grpc/status" + + "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" ) func (k Keeper) ComplianceInfoAll(c context.Context, req *types.QueryAllComplianceInfoRequest) (*types.QueryAllComplianceInfoResponse, error) { @@ -19,14 +17,14 @@ func (k Keeper) ComplianceInfoAll(c context.Context, req *types.QueryAllComplian return nil, status.Error(codes.InvalidArgument, "invalid request") } - var complianceInfos []dclcompltypes.ComplianceInfo + var complianceInfos []types.ComplianceInfo ctx := sdk.UnwrapSDKContext(c) store := ctx.KVStore(k.storeKey) complianceInfoStore := prefix.NewStore(store, types.KeyPrefix(types.ComplianceInfoKeyPrefix)) pageRes, err := query.Paginate(complianceInfoStore, req.Pagination, func(key []byte, value []byte) error { - var complianceInfo dclcompltypes.ComplianceInfo + var complianceInfo types.ComplianceInfo if err := k.cdc.Unmarshal(value, &complianceInfo); err != nil { return err } diff --git a/x/compliance/keeper/keeper.go b/x/compliance/keeper/keeper.go index 4d016660e..4db67a308 100644 --- a/x/compliance/keeper/keeper.go +++ b/x/compliance/keeper/keeper.go @@ -3,17 +3,19 @@ package keeper import ( "fmt" + "github.com/cometbft/cometbft/libs/log" 
"github.com/cosmos/cosmos-sdk/codec" + storetypes "github.com/cosmos/cosmos-sdk/store/types" sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/tendermint/tendermint/libs/log" + "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" ) type ( Keeper struct { cdc codec.BinaryCodec - storeKey sdk.StoreKey - memKey sdk.StoreKey + storeKey storetypes.StoreKey + memKey storetypes.StoreKey dclauthKeeper types.DclauthKeeper modelKeeper types.ModelKeeper @@ -23,7 +25,7 @@ type ( func NewKeeper( cdc codec.BinaryCodec, storeKey, - memKey sdk.StoreKey, + memKey storetypes.StoreKey, dclauthKeeper types.DclauthKeeper, modelKeeper types.ModelKeeper, ) *Keeper { diff --git a/x/compliance/keeper/msg_server_certify_model.go b/x/compliance/keeper/msg_server_certify_model.go index dc91b3d6f..d867d25f2 100644 --- a/x/compliance/keeper/msg_server_certify_model.go +++ b/x/compliance/keeper/msg_server_certify_model.go @@ -5,9 +5,10 @@ import ( "fmt" "time" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" + "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" dclauthtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" modeltypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/model/types" @@ -18,13 +19,13 @@ func (k msgServer) CertifyModel(goCtx context.Context, msg *types.MsgCertifyMode signerAddr, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } // check if sender has enough rights to certify model // sender must have CertificationCenter role to certify/revoke model if !k.dclauthKeeper.HasRole(ctx, signerAddr, dclauthtypes.CertificationCenter) { - return 
nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "MsgAddTestingResult transaction should be signed by an account with the %s role", dclauthtypes.CertificationCenter, ) @@ -57,7 +58,7 @@ func (k msgServer) CertifyModel(goCtx context.Context, msg *types.MsgCertifyMode // and so the test results are not required to be present. // check if compliance is already in certified state - if complianceInfo.SoftwareVersionCertificationStatus == dclcompltypes.CodeCertified { + if complianceInfo.SoftwareVersionCertificationStatus == types.CodeCertified { return nil, types.NewErrAlreadyCertified(msg.Vid, msg.Pid, msg.SoftwareVersion, msg.CertificationType) } @@ -81,7 +82,7 @@ func (k msgServer) CertifyModel(goCtx context.Context, msg *types.MsgCertifyMode } else { // There is no compliance record yet. So certification will be tracked on ledger. - complianceInfo = dclcompltypes.ComplianceInfo{ + complianceInfo = types.ComplianceInfo{ Vid: msg.Vid, Pid: msg.Pid, SoftwareVersion: msg.SoftwareVersion, @@ -90,15 +91,15 @@ func (k msgServer) CertifyModel(goCtx context.Context, msg *types.MsgCertifyMode Date: msg.CertificationDate, Reason: msg.Reason, Owner: msg.Signer, - SoftwareVersionCertificationStatus: dclcompltypes.CodeCertified, - History: []*dclcompltypes.ComplianceHistoryItem{}, + SoftwareVersionCertificationStatus: types.CodeCertified, + History: []*types.ComplianceHistoryItem{}, CDVersionNumber: msg.CDVersionNumber, CDCertificateId: msg.CDCertificateId, SchemaVersion: msg.SchemaVersion, } } - optionalFields := &dclcompltypes.OptionalFields{ + optionalFields := &types.OptionalFields{ ProgramTypeVersion: msg.ProgramTypeVersion, FamilyID: msg.FamilyId, SupportedClusters: msg.SupportedClusters, diff --git a/x/compliance/keeper/msg_server_delete_compliance_info.go b/x/compliance/keeper/msg_server_delete_compliance_info.go index 5ae616867..6eda5ff5c 100644 --- a/x/compliance/keeper/msg_server_delete_compliance_info.go +++ 
b/x/compliance/keeper/msg_server_delete_compliance_info.go @@ -3,8 +3,10 @@ package keeper import ( "context" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" dclauthtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" ) @@ -14,13 +16,13 @@ func (k msgServer) DeleteComplianceInfo(goCtx context.Context, msg *types.MsgDel signerAddr, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } // check if sender has enough rights to delete model // sender must have CertificationCenter role to certify/revoke model if !k.dclauthKeeper.HasRole(ctx, signerAddr, dclauthtypes.CertificationCenter) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "MsgDeleteComplianceInfo transaction should be signed by an account with the %s role", dclauthtypes.CertificationCenter, ) diff --git a/x/compliance/keeper/msg_server_provision_model.go b/x/compliance/keeper/msg_server_provision_model.go index 1db37fb43..ef83d0c8a 100644 --- a/x/compliance/keeper/msg_server_provision_model.go +++ b/x/compliance/keeper/msg_server_provision_model.go @@ -3,9 +3,10 @@ package keeper import ( "context" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" + "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" dclauthtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" modeltypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/model/types" @@ -16,13 +17,13 @@ func (k 
msgServer) ProvisionModel(goCtx context.Context, msg *types.MsgProvision signerAddr, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } // check if sender has enough rights to provision model // sender must have CertificationCenter role to certify/revoke model if !k.dclauthKeeper.HasRole(ctx, signerAddr, dclauthtypes.CertificationCenter) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "MsgAddTestingResult transaction should be signed by an account with the %s role", dclauthtypes.CertificationCenter, ) @@ -32,11 +33,11 @@ func (k msgServer) ProvisionModel(goCtx context.Context, msg *types.MsgProvision complianceInfo, found := k.GetComplianceInfo(ctx, msg.Vid, msg.Pid, msg.SoftwareVersion, msg.CertificationType) if found { switch status := complianceInfo.SoftwareVersionCertificationStatus; status { - case dclcompltypes.CodeProvisional: + case types.CodeProvisional: return nil, types.NewErrAlreadyProvisional(msg.Vid, msg.Pid, msg.SoftwareVersion, msg.CertificationType) - case dclcompltypes.CodeCertified: + case types.CodeCertified: return nil, types.NewErrAlreadyCertified(msg.Vid, msg.Pid, msg.SoftwareVersion, msg.CertificationType) - case dclcompltypes.CodeRevoked: + case types.CodeRevoked: return nil, types.NewErrAlreadyRevoked(msg.Vid, msg.Pid, msg.SoftwareVersion, msg.CertificationType) default: return nil, types.NewErrComplianceInfoAlreadyExist(msg.Vid, msg.Pid, msg.SoftwareVersion, msg.CertificationType) @@ -57,7 +58,7 @@ func (k msgServer) ProvisionModel(goCtx context.Context, msg *types.MsgProvision return nil, types.NewErrModelVersionCDVersionNumberDoesNotMatch(msg.Vid, msg.Pid, msg.SoftwareVersion, msg.CDVersionNumber) } - complianceInfo = dclcompltypes.ComplianceInfo{ + complianceInfo = types.ComplianceInfo{ 
Vid: msg.Vid, Pid: msg.Pid, SoftwareVersion: msg.SoftwareVersion, @@ -66,8 +67,8 @@ func (k msgServer) ProvisionModel(goCtx context.Context, msg *types.MsgProvision Date: msg.ProvisionalDate, Reason: msg.Reason, Owner: msg.Signer, - SoftwareVersionCertificationStatus: dclcompltypes.CodeProvisional, - History: []*dclcompltypes.ComplianceHistoryItem{}, + SoftwareVersionCertificationStatus: types.CodeProvisional, + History: []*types.ComplianceHistoryItem{}, CDVersionNumber: msg.CDVersionNumber, ProgramTypeVersion: msg.ProgramTypeVersion, CDCertificateId: msg.CDCertificateId, diff --git a/x/compliance/keeper/msg_server_revoke_model.go b/x/compliance/keeper/msg_server_revoke_model.go index 202e6a4d6..eb8aacc68 100644 --- a/x/compliance/keeper/msg_server_revoke_model.go +++ b/x/compliance/keeper/msg_server_revoke_model.go @@ -5,9 +5,10 @@ import ( "fmt" "time" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" + "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" dclauthtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" modeltypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/model/types" @@ -18,13 +19,13 @@ func (k msgServer) RevokeModel(goCtx context.Context, msg *types.MsgRevokeModel) signerAddr, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } // check if sender has enough rights to revoke model // sender must have CertificationCenter role to certify/revoke model if !k.dclauthKeeper.HasRole(ctx, signerAddr, dclauthtypes.CertificationCenter) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, 
"MsgAddTestingResult transaction should be signed by an account with the %s role", dclauthtypes.CertificationCenter, ) @@ -53,7 +54,7 @@ func (k msgServer) RevokeModel(goCtx context.Context, msg *types.MsgRevokeModel) // 2) We want to revoke certified or provisioned compliance. // check if compliance is already in revoked state - if complianceInfo.SoftwareVersionCertificationStatus == dclcompltypes.CodeRevoked { + if complianceInfo.SoftwareVersionCertificationStatus == types.CodeRevoked { return nil, types.NewErrAlreadyRevoked(msg.Vid, msg.Pid, msg.SoftwareVersion, msg.CertificationType) } // if state changes on `revoked` check that revocation date is after certification/provisional date @@ -110,7 +111,7 @@ func (k msgServer) RevokeModel(goCtx context.Context, msg *types.MsgRevokeModel) } else { // There is no compliance record yet. So only revocation will be tracked on ledger. - complianceInfo = dclcompltypes.ComplianceInfo{ + complianceInfo = types.ComplianceInfo{ Vid: msg.Vid, Pid: msg.Pid, SoftwareVersion: msg.SoftwareVersion, @@ -119,8 +120,8 @@ func (k msgServer) RevokeModel(goCtx context.Context, msg *types.MsgRevokeModel) Date: msg.RevocationDate, Reason: msg.Reason, Owner: msg.Signer, - SoftwareVersionCertificationStatus: dclcompltypes.CodeRevoked, - History: []*dclcompltypes.ComplianceHistoryItem{}, + SoftwareVersionCertificationStatus: types.CodeRevoked, + History: []*types.ComplianceHistoryItem{}, CDVersionNumber: msg.CDVersionNumber, SchemaVersion: msg.SchemaVersion, } diff --git a/x/compliance/keeper/msg_server_update_compliance_info.go b/x/compliance/keeper/msg_server_update_compliance_info.go index 778a51c01..017dfc94d 100644 --- a/x/compliance/keeper/msg_server_update_compliance_info.go +++ b/x/compliance/keeper/msg_server_update_compliance_info.go @@ -4,6 +4,7 @@ import ( "context" "strconv" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" 
"github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" @@ -18,11 +19,11 @@ func (k msgServer) UpdateComplianceInfo(goCtx context.Context, msg *types.MsgUpd signerAddr, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } if !k.dclauthKeeper.HasRole(ctx, signerAddr, dclauthtypes.CertificationCenter) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, "%s transaction should be "+ + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "%s transaction should be "+ "signed by an account with the %s role", "MsgUpdateComplianceInfo", dclauthtypes.CertificationCenter) } diff --git a/x/compliance/module.go b/x/compliance/module.go index f1e53d9a1..fdf5024fa 100644 --- a/x/compliance/module.go +++ b/x/compliance/module.go @@ -5,6 +5,7 @@ import ( "encoding/json" "fmt" + abci "github.com/cometbft/cometbft/abci/types" "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/codec" cdctypes "github.com/cosmos/cosmos-sdk/codec/types" @@ -13,7 +14,6 @@ import ( "github.com/gorilla/mux" "github.com/grpc-ecosystem/grpc-gateway/runtime" "github.com/spf13/cobra" - abci "github.com/tendermint/tendermint/abci/types" "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/client/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" @@ -61,7 +61,7 @@ func (AppModuleBasic) DefaultGenesis(cdc codec.JSONCodec) json.RawMessage { } // ValidateGenesis performs genesis state validation for the capability module. 
-func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, config client.TxEncodingConfig, bz json.RawMessage) error { +func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, _ client.TxEncodingConfig, bz json.RawMessage) error { var genState types.GenesisState if err := cdc.UnmarshalJSON(bz, &genState); err != nil { return fmt.Errorf("failed to unmarshal %s genesis state: %w", types.ModuleName, err) @@ -71,7 +71,7 @@ func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, config client.TxEncod } // RegisterRESTRoutes registers the capability module's REST service handlers. -func (AppModuleBasic) RegisterRESTRoutes(clientCtx client.Context, rtr *mux.Router) { +func (AppModuleBasic) RegisterRESTRoutes(_ client.Context, _ *mux.Router) { } // RegisterGRPCGatewayRoutes registers the gRPC Gateway routes for the module. @@ -112,19 +112,6 @@ func (am AppModule) Name() string { return am.AppModuleBasic.Name() } -// Route returns the capability module's message routing key. -func (am AppModule) Route() sdk.Route { - return sdk.NewRoute(types.RouterKey, NewHandler(am.keeper)) -} - -// QuerierRoute returns the capability module's query routing key. -func (AppModule) QuerierRoute() string { return types.QuerierRoute } - -// LegacyQuerierHandler returns the capability module's Querier. -func (am AppModule) LegacyQuerierHandler(legacyQuerierCdc *codec.LegacyAmino) sdk.Querier { - return nil -} - // RegisterServices registers a GRPC query service to respond to the // module-specific GRPC queries. 
func (am AppModule) RegisterServices(cfg module.Configurator) { diff --git a/x/compliance/module_simulation.go b/x/compliance/module_simulation.go index a8a875b27..59b76ec13 100644 --- a/x/compliance/module_simulation.go +++ b/x/compliance/module_simulation.go @@ -4,11 +4,11 @@ import ( "math/rand" "github.com/cosmos/cosmos-sdk/baseapp" - simappparams "github.com/cosmos/cosmos-sdk/simapp/params" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/types/module" simtypes "github.com/cosmos/cosmos-sdk/types/simulation" "github.com/cosmos/cosmos-sdk/x/simulation" + "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/sample" compliancesimulation "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/simulation" "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" @@ -18,7 +18,6 @@ import ( var ( _ = sample.AccAddress _ = compliancesimulation.FindAccount - _ = simappparams.StakePerAccount _ = simulation.MsgEntryKind _ = baseapp.Paramspace ) @@ -60,15 +59,10 @@ func (AppModule) GenerateGenesisState(simState *module.SimulationState) { } // ProposalContents doesn't return any content functions for governance proposals. -func (AppModule) ProposalContents(_ module.SimulationState) []simtypes.WeightedProposalContent { +func (AppModule) ProposalContents(_ module.SimulationState) []simtypes.WeightedProposalContent { //nolint:staticcheck return nil } -// RandomizedParams creates randomized param changes for the simulator. -func (am AppModule) RandomizedParams(_ *rand.Rand) []simtypes.ParamChange { - return []simtypes.ParamChange{} -} - // RegisterStoreDecoder registers a decoder. 
func (am AppModule) RegisterStoreDecoder(_ sdk.StoreDecoderRegistry) {} diff --git a/x/compliance/simulation/certify_model.go b/x/compliance/simulation/certify_model.go index da8a9627c..98e167927 100644 --- a/x/compliance/simulation/certify_model.go +++ b/x/compliance/simulation/certify_model.go @@ -11,7 +11,7 @@ import ( ) func SimulateMsgCertifyModel( - k keeper.Keeper, + _ keeper.Keeper, ) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { diff --git a/x/compliance/simulation/delete_compliance_info.go b/x/compliance/simulation/delete_compliance_info.go index 926897bdb..642f8ce8e 100644 --- a/x/compliance/simulation/delete_compliance_info.go +++ b/x/compliance/simulation/delete_compliance_info.go @@ -11,7 +11,7 @@ import ( ) func SimulateMsgDeleteComplianceInfo( - k keeper.Keeper, + _ keeper.Keeper, ) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { diff --git a/x/compliance/simulation/provision_model.go b/x/compliance/simulation/provision_model.go index 7bd76f8f3..57162ef53 100644 --- a/x/compliance/simulation/provision_model.go +++ b/x/compliance/simulation/provision_model.go @@ -11,7 +11,7 @@ import ( ) func SimulateMsgProvisionModel( - k keeper.Keeper, + _ keeper.Keeper, ) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { diff --git a/x/compliance/simulation/revoke_model.go b/x/compliance/simulation/revoke_model.go index 8b466c87f..7d55aa013 100644 --- a/x/compliance/simulation/revoke_model.go +++ b/x/compliance/simulation/revoke_model.go @@ -11,7 +11,7 @@ import ( ) func SimulateMsgRevokeModel( - k keeper.Keeper, + _ keeper.Keeper, ) 
simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { diff --git a/x/compliance/simulation/update_compliance_info.go b/x/compliance/simulation/update_compliance_info.go index b6dd675e1..033db00d5 100644 --- a/x/compliance/simulation/update_compliance_info.go +++ b/x/compliance/simulation/update_compliance_info.go @@ -11,7 +11,7 @@ import ( ) func SimulateMsgUpdateComplianceInfo( - k keeper.Keeper, + _ keeper.Keeper, ) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { diff --git a/x/compliance/types/certified_model.pb.go b/x/compliance/types/certified_model.pb.go index 87ee1ced7..59527d345 100644 --- a/x/compliance/types/certified_model.pb.go +++ b/x/compliance/types/certified_model.pb.go @@ -1,11 +1,11 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: compliance/certified_model.proto +// source: zigbeealliance/distributedcomplianceledger/compliance/certified_model.proto package types import ( fmt "fmt" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -34,7 +34,7 @@ func (m *CertifiedModel) Reset() { *m = CertifiedModel{} } func (m *CertifiedModel) String() string { return proto.CompactTextString(m) } func (*CertifiedModel) ProtoMessage() {} func (*CertifiedModel) Descriptor() ([]byte, []int) { - return fileDescriptor_9158b154fcb2a9dd, []int{0} + return fileDescriptor_7be90d472c8a2fef, []int{0} } func (m *CertifiedModel) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -102,27 +102,29 @@ func init() { proto.RegisterType((*CertifiedModel)(nil), "zigbeealliance.distributedcomplianceledger.compliance.CertifiedModel") } -func init() { proto.RegisterFile("compliance/certified_model.proto", fileDescriptor_9158b154fcb2a9dd) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/compliance/certified_model.proto", fileDescriptor_7be90d472c8a2fef) +} -var fileDescriptor_9158b154fcb2a9dd = []byte{ +var fileDescriptor_7be90d472c8a2fef = []byte{ // 272 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x64, 0x90, 0x3d, 0x4e, 0xf4, 0x30, - 0x10, 0x86, 0xe3, 0x6f, 0xbf, 0x20, 0xb0, 0xc4, 0x5f, 0x44, 0x91, 0xca, 0x8a, 0xa8, 0x52, 0x90, - 0xb8, 0x40, 0x5c, 0x00, 0x4a, 0x44, 0x13, 0x21, 0x0a, 0x1a, 0xe4, 0xc4, 0xb3, 0x61, 0x24, 0x6f, - 0x6c, 0x39, 0xce, 0xc2, 0x72, 0x0a, 0xce, 0xc0, 0x69, 0x28, 0xb7, 0xa4, 0x44, 0xc9, 0x45, 0x50, - 0x30, 0xab, 0xac, 0xa0, 0xb3, 0x1f, 0x7b, 0x5e, 0xcd, 0xfb, 0xd0, 0xa4, 0xd2, 0x0b, 0xa3, 0x50, - 0x34, 0x15, 0xf0, 0x0a, 0xac, 0xc3, 0x39, 0x82, 0x7c, 0x58, 0x68, 0x09, 0x2a, 0x37, 0x56, 0x3b, - 0x1d, 0x5d, 0xbc, 0x60, 0x5d, 0x02, 0x08, 0xe5, 0x7f, 0xe5, 0x12, 0x5b, 0x67, 0xb1, 0xec, 0x1c, - 0xc8, 0x69, 0x56, 0x81, 0xac, 0xc1, 
0xe6, 0x13, 0x38, 0x7d, 0x23, 0xf4, 0xe0, 0x6a, 0x13, 0x78, - 0x33, 0xe6, 0x45, 0x47, 0x74, 0xb6, 0x44, 0x19, 0x93, 0x84, 0xa4, 0x61, 0x31, 0x1e, 0x47, 0x62, - 0x50, 0xc6, 0xff, 0x3c, 0x31, 0x28, 0xa3, 0x94, 0x1e, 0xb6, 0x7a, 0xee, 0x9e, 0x84, 0x85, 0x3b, - 0xb0, 0x2d, 0xea, 0x26, 0x9e, 0x25, 0x24, 0xdd, 0x2f, 0x7e, 0xe3, 0xe8, 0x8c, 0x1e, 0xff, 0x2c, - 0x5c, 0x09, 0x87, 0xba, 0xb9, 0x5d, 0x19, 0x88, 0xff, 0x27, 0x24, 0xdd, 0x2b, 0xfe, 0x3e, 0x44, - 0x27, 0x34, 0x5c, 0x0a, 0xd5, 0x41, 0x1c, 0x26, 0x24, 0xdd, 0x2d, 0xfc, 0xe5, 0x12, 0xde, 0x7b, - 0x46, 0xd6, 0x3d, 0x23, 0x9f, 0x3d, 0x23, 0xaf, 0x03, 0x0b, 0xd6, 0x03, 0x0b, 0x3e, 0x06, 0x16, - 0xdc, 0x5f, 0xd7, 0xe8, 0x1e, 0xbb, 0x72, 0x6c, 0xc4, 0xbd, 0x80, 0x6c, 0x63, 0x80, 0x6f, 0x19, - 0xc8, 0xa6, 0xc6, 0x99, 0x77, 0xc0, 0x9f, 0xf9, 0x96, 0x52, 0xb7, 0x32, 0xd0, 0x96, 0x3b, 0xdf, - 0x26, 0xcf, 0xbf, 0x02, 0x00, 0x00, 0xff, 0xff, 0xb9, 0xa9, 0x16, 0xac, 0x6d, 0x01, 0x00, 0x00, + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xbf, 0x4a, 0xc4, 0x30, + 0x1c, 0xc7, 0x1b, 0xcf, 0x8a, 0x06, 0xfc, 0x57, 0x1c, 0x3a, 0x85, 0xe2, 0x94, 0xc1, 0xb6, 0x83, + 0xf8, 0x02, 0x3a, 0x1e, 0x2e, 0x45, 0x1c, 0x5c, 0x24, 0x6d, 0x7e, 0x57, 0x7f, 0x90, 0x6b, 0x42, + 0x9a, 0x9e, 0x9e, 0x4f, 0xe1, 0x33, 0xf8, 0x34, 0x8e, 0x37, 0x3a, 0x4a, 0xfb, 0x22, 0x52, 0x7b, + 0x52, 0xff, 0xe1, 0x96, 0x7c, 0x02, 0x1f, 0xf2, 0xfd, 0xd0, 0xe9, 0x23, 0x96, 0x39, 0x80, 0x50, + 0x0a, 0x45, 0x55, 0x40, 0x2a, 0xb1, 0x76, 0x16, 0xf3, 0xc6, 0x81, 0x2c, 0xf4, 0xdc, 0x0c, 0x54, + 0x81, 0x2c, 0xc1, 0xa6, 0x23, 0x48, 0x0b, 0xb0, 0x0e, 0x67, 0x08, 0xf2, 0x76, 0xae, 0x25, 0xa8, + 0xc4, 0x58, 0xed, 0x74, 0x70, 0xf6, 0x5d, 0x96, 0xfc, 0x23, 0x4b, 0x46, 0x70, 0xfc, 0x4c, 0xe8, + 0xde, 0xc5, 0xa7, 0xf0, 0xb2, 0xf7, 0x05, 0x07, 0x74, 0xb2, 0x40, 0x19, 0x92, 0x88, 0x70, 0x3f, + 0xeb, 0x8f, 0x3d, 0x31, 0x28, 0xc3, 0x8d, 0x81, 0x18, 0x94, 0x01, 0xa7, 0xfb, 0xb5, 0x9e, 0xb9, + 0x7b, 0x61, 0xe1, 0x1a, 0x6c, 0x8d, 0xba, 0x0a, 0x27, 0x11, 0xe1, 0xbb, 
0xd9, 0x4f, 0x1c, 0x9c, + 0xd0, 0xc3, 0xf5, 0x87, 0x0b, 0xe1, 0x50, 0x57, 0x57, 0x4b, 0x03, 0xe1, 0x66, 0x44, 0xf8, 0x4e, + 0xf6, 0xfb, 0x21, 0x38, 0xa2, 0xfe, 0x42, 0xa8, 0x06, 0x42, 0x3f, 0x22, 0x7c, 0x3b, 0x1b, 0x2e, + 0xe7, 0xf0, 0xd2, 0x32, 0xb2, 0x6a, 0x19, 0x79, 0x6b, 0x19, 0x79, 0xea, 0x98, 0xb7, 0xea, 0x98, + 0xf7, 0xda, 0x31, 0xef, 0x66, 0x5a, 0xa2, 0xbb, 0x6b, 0xf2, 0x7e, 0x51, 0x3a, 0x04, 0x88, 0xff, + 0xca, 0x19, 0x8f, 0x8b, 0xe3, 0x75, 0xd0, 0x87, 0xaf, 0x49, 0xdd, 0xd2, 0x40, 0x9d, 0x6f, 0x7d, + 0x94, 0x3c, 0x7d, 0x0f, 0x00, 0x00, 0xff, 0xff, 0xe0, 0x49, 0x94, 0xb3, 0x98, 0x01, 0x00, 0x00, } func (m *CertifiedModel) Marshal() (dAtA []byte, err error) { diff --git a/x/compliance/types/codec.go b/x/compliance/types/codec.go index f8fe5af5c..855b20188 100644 --- a/x/compliance/types/codec.go +++ b/x/compliance/types/codec.go @@ -36,7 +36,7 @@ func RegisterInterfaces(registry cdctypes.InterfaceRegistry) { ) // this line is used by starport scaffolding # 3 - msgservice.RegisterMsgServiceDesc(registry, &_Msg_serviceDesc) + msgservice.RegisterMsgServiceDesc(registry, &_Msg_serviceDesc) //nolint:nosnakecase } var ModuleCdc = codec.NewProtoCodec(cdctypes.NewInterfaceRegistry()) diff --git a/types/compliance/compliance_history_item.pb.go b/x/compliance/types/compliance_history_item.pb.go similarity index 84% rename from types/compliance/compliance_history_item.pb.go rename to x/compliance/types/compliance_history_item.pb.go index 376b01ef5..9559e01f1 100644 --- a/types/compliance/compliance_history_item.pb.go +++ b/x/compliance/types/compliance_history_item.pb.go @@ -1,11 +1,11 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: compliance/compliance_history_item.proto +// source: zigbeealliance/distributedcomplianceledger/compliance/compliance_history_item.proto package types import ( fmt "fmt" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -33,7 +33,7 @@ func (m *ComplianceHistoryItem) Reset() { *m = ComplianceHistoryItem{} } func (m *ComplianceHistoryItem) String() string { return proto.CompactTextString(m) } func (*ComplianceHistoryItem) ProtoMessage() {} func (*ComplianceHistoryItem) Descriptor() ([]byte, []int) { - return fileDescriptor_5f19be5c9c9ea390, []int{0} + return fileDescriptor_e9a4634fe5730cca, []int{0} } func (m *ComplianceHistoryItem) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -95,28 +95,28 @@ func init() { } func init() { - proto.RegisterFile("compliance/compliance_history_item.proto", fileDescriptor_5f19be5c9c9ea390) + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/compliance/compliance_history_item.proto", fileDescriptor_e9a4634fe5730cca) } -var fileDescriptor_5f19be5c9c9ea390 = []byte{ +var fileDescriptor_e9a4634fe5730cca = []byte{ // 275 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x90, 0x31, 0x4b, 0xc3, 0x40, - 0x1c, 0xc5, 0x73, 0x5a, 0x0a, 0x1e, 0x88, 0x70, 0xa0, 0x64, 0x3a, 0x4a, 0xa7, 0x2c, 0x49, 0x06, - 0xf1, 0x0b, 0x58, 0x07, 0x45, 0xe8, 0x50, 0xc1, 0xc1, 0xa5, 0xdc, 0x25, 0xff, 0xa6, 0x7f, 0x48, - 0x72, 0xe1, 0xee, 0x1f, 0xb4, 0x7e, 0x0a, 0xbf, 0x90, 0xbb, 0x63, 0x47, 0x47, 0x49, 0xbe, 0x88, - 0x98, 0x54, 0x53, 0x9c, 0xba, 0xbd, 0xf7, 0x78, 0xf7, 0x8e, 0xff, 0x8f, 0x07, 0x89, 0x29, 0xaa, - 0x1c, 0x55, 0x99, 0x40, 0x3c, 0xc8, 0xe5, 0x1a, 0x1d, 0x19, 0xbb, 0x59, 0x22, 0x41, 0x11, 0x55, - 0xd6, 0x90, 0x11, 0x57, 0xaf, 0x98, 0x69, 0x00, 0x95, 0xf7, 0x95, 0x28, 0x45, 0x47, 0x16, 0x75, - 0x4d, 0x90, 0x0e, 0x0f, 0x73, 0x48, 0x33, 0xb0, 0xd1, 0x10, 0x4c, 0xdf, 0x19, 0x3f, 0x9f, 0xfd, - 
0xd9, 0xdb, 0x7e, 0xf7, 0x8e, 0xa0, 0x10, 0x73, 0x3e, 0x75, 0x66, 0x45, 0xcf, 0xca, 0xc2, 0x23, - 0x58, 0x87, 0xa6, 0x9c, 0x81, 0x25, 0x5c, 0x61, 0xa2, 0x08, 0x4d, 0xf9, 0x40, 0x8a, 0x6a, 0xe7, - 0xb3, 0x09, 0x0b, 0x4e, 0x17, 0x07, 0x34, 0x85, 0xe0, 0xa3, 0x54, 0x11, 0xf8, 0x47, 0x13, 0x16, - 0x9c, 0x2c, 0x3a, 0x2d, 0x2e, 0xf8, 0xd8, 0x82, 0x72, 0xa6, 0xf4, 0x8f, 0xbb, 0x74, 0xe7, 0x44, - 0xc0, 0xcf, 0x92, 0x9b, 0xdd, 0xd6, 0xbc, 0x2e, 0x34, 0x58, 0x7f, 0xd4, 0x7d, 0xf4, 0x3f, 0xbe, - 0x86, 0x8f, 0x46, 0xb2, 0x6d, 0x23, 0xd9, 0x57, 0x23, 0xd9, 0x5b, 0x2b, 0xbd, 0x6d, 0x2b, 0xbd, - 0xcf, 0x56, 0x7a, 0x4f, 0xf7, 0x19, 0xd2, 0xba, 0xd6, 0x3f, 0xc7, 0xc6, 0x3d, 0x9b, 0xf0, 0x17, - 0x4e, 0xbc, 0x07, 0x27, 0x1c, 0x60, 0x84, 0x3d, 0x9e, 0xf8, 0x65, 0x0f, 0x75, 0x4c, 0x9b, 0x0a, - 0x9c, 0x1e, 0x77, 0x90, 0x2f, 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0xec, 0xe6, 0xca, 0xb9, 0x90, + 0x14, 0x80, 0x73, 0x5a, 0x0a, 0x1e, 0x88, 0x70, 0xa0, 0x64, 0x3a, 0x4a, 0xa7, 0x2c, 0x49, 0x06, + 0xf1, 0x0f, 0x58, 0x07, 0x45, 0xe8, 0xd0, 0x82, 0x83, 0x4b, 0xb9, 0x24, 0xaf, 0xe9, 0x83, 0x24, + 0x17, 0xee, 0x5e, 0xd0, 0xfa, 0x2b, 0xfc, 0x43, 0xee, 0x8e, 0x1d, 0x1d, 0x25, 0xf9, 0x23, 0x62, + 0x12, 0x4c, 0x15, 0x11, 0xb7, 0xf7, 0x3e, 0x8e, 0xef, 0x78, 0x1f, 0x5f, 0x3e, 0x61, 0x1a, 0x01, + 0xa8, 0x2c, 0x43, 0x55, 0xc4, 0x10, 0x26, 0x68, 0xc9, 0x60, 0x54, 0x11, 0x24, 0xb1, 0xce, 0xcb, + 0x8e, 0x66, 0x90, 0xa4, 0x60, 0xc2, 0x01, 0xec, 0x8d, 0xab, 0x0d, 0x5a, 0xd2, 0x66, 0xbb, 0x42, + 0x82, 0x3c, 0x28, 0x8d, 0x26, 0x2d, 0x2e, 0xbe, 0x4b, 0x83, 0x3f, 0xa4, 0xc1, 0x00, 0xa6, 0x2f, + 0x8c, 0x9f, 0xce, 0xbe, 0xd6, 0xeb, 0xce, 0x7b, 0x43, 0x90, 0x8b, 0x39, 0x9f, 0x5a, 0xbd, 0xa6, + 0x07, 0x65, 0xe0, 0x0e, 0x8c, 0x45, 0x5d, 0xcc, 0xc0, 0x10, 0xae, 0x31, 0x56, 0x84, 0xba, 0x58, + 0x92, 0xa2, 0xca, 0xba, 0x6c, 0xc2, 0xbc, 0xe3, 0xc5, 0x3f, 0x5e, 0x0a, 0xc1, 0x47, 0x89, 0x22, + 0x70, 0x0f, 0x26, 0xcc, 0x3b, 0x5a, 0xb4, 0xb3, 0x38, 0xe3, 0x63, 0x03, 0xca, 0xea, 0xc2, 0x3d, + 0x6c, 0x69, 0xbf, 0x09, 0x8f, 0x9f, 
0xc4, 0x57, 0xbd, 0x6b, 0x5e, 0xe5, 0x11, 0x18, 0x77, 0xd4, + 0x7e, 0xf4, 0x13, 0x5f, 0xc2, 0x6b, 0x2d, 0xd9, 0xae, 0x96, 0xec, 0xbd, 0x96, 0xec, 0xb9, 0x91, + 0xce, 0xae, 0x91, 0xce, 0x5b, 0x23, 0x9d, 0xfb, 0xdb, 0x14, 0x69, 0x53, 0x45, 0x9f, 0xc7, 0x86, + 0x5d, 0x1b, 0xff, 0xb7, 0xe2, 0xfe, 0x10, 0xc3, 0xef, 0x9b, 0x3f, 0xee, 0x57, 0xa7, 0x6d, 0x09, + 0x36, 0x1a, 0xb7, 0x91, 0xcf, 0x3f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x4a, 0xe3, 0x3e, 0x94, 0xbb, 0x01, 0x00, 0x00, } diff --git a/types/compliance/compliance_info.go b/x/compliance/types/compliance_info.go similarity index 100% rename from types/compliance/compliance_info.go rename to x/compliance/types/compliance_info.go diff --git a/types/compliance/compliance_info.pb.go b/x/compliance/types/compliance_info.pb.go similarity index 89% rename from types/compliance/compliance_info.pb.go rename to x/compliance/types/compliance_info.pb.go index 48c0c6a83..c5233ff70 100644 --- a/types/compliance/compliance_info.pb.go +++ b/x/compliance/types/compliance_info.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: compliance/compliance_info.proto +// source: zigbeealliance/distributedcomplianceledger/compliance/compliance_info.proto package types import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -54,7 +54,7 @@ func (m *ComplianceInfo) Reset() { *m = ComplianceInfo{} } func (m *ComplianceInfo) String() string { return proto.CompactTextString(m) } func (*ComplianceInfo) ProtoMessage() {} func (*ComplianceInfo) Descriptor() ([]byte, []int) { - return fileDescriptor_96709118384d66e8, []int{0} + return fileDescriptor_b59222752039640e, []int{0} } func (m *ComplianceInfo) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -255,49 +255,51 @@ func init() { proto.RegisterType((*ComplianceInfo)(nil), "zigbeealliance.distributedcomplianceledger.compliance.ComplianceInfo") } -func init() { proto.RegisterFile("compliance/compliance_info.proto", fileDescriptor_96709118384d66e8) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/compliance/compliance_info.proto", fileDescriptor_b59222752039640e) +} -var fileDescriptor_96709118384d66e8 = []byte{ - // 614 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0xcb, 0x6e, 0x13, 0x31, - 0x14, 0xed, 0xd0, 0xb7, 0x4b, 0x5f, 0xa6, 0x2d, 0xa6, 0x42, 0x51, 0x54, 0xb1, 0xc8, 0x82, 0x4c, - 0x24, 0x1e, 0x1b, 0x76, 0x34, 0x2c, 0x88, 0x40, 0x2d, 0x9a, 0x00, 0x0b, 0x36, 0x95, 0x33, 0xbe, - 0x93, 0x58, 0x9a, 0xb1, 0x47, 0xb6, 0x87, 0x12, 0xbe, 0x82, 0x8f, 0x61, 0xc9, 0x07, 0xb0, 0xac, - 0x58, 0xb1, 0x44, 0xed, 0x8f, 0x20, 0xdb, 0x33, 0x9d, 0x34, 0x09, 0x52, 0xc5, 0xce, 0xf7, 0x9c, - 0xfb, 0x38, 0xf6, 0x3d, 0x33, 0xa8, 0x19, 0xcb, 0x2c, 0x4f, 0x39, 0x15, 0x31, 0x74, 0xea, 0xe3, - 0x19, 0x17, 0x89, 0x0c, 0x73, 0x25, 0x8d, 0xc4, 0xcf, 0xbf, 0xf2, 0xe1, 0x00, 0x80, 0xa6, 0x9e, - 0x0a, 0x19, 0xd7, 0x46, 0xf1, 0x41, 0x61, 
0x80, 0xd5, 0x05, 0x29, 0xb0, 0x21, 0xa8, 0xb0, 0x06, - 0x0e, 0x5b, 0xf3, 0x1b, 0x8f, 0xb8, 0x36, 0x52, 0x8d, 0xcf, 0xb8, 0x81, 0xcc, 0x0f, 0x38, 0x7c, - 0x10, 0x4b, 0x9d, 0x49, 0x7d, 0xe6, 0xa2, 0x8e, 0x0f, 0x3c, 0x75, 0xf4, 0x63, 0x0d, 0x6d, 0x75, - 0xaf, 0x8b, 0x7b, 0x22, 0x91, 0x78, 0x07, 0x2d, 0x7e, 0xe6, 0x8c, 0x04, 0xcd, 0xa0, 0xb5, 0x1c, - 0xd9, 0xa3, 0x45, 0x72, 0xce, 0xc8, 0x1d, 0x8f, 0xe4, 0x9c, 0xe1, 0x16, 0xda, 0xd6, 0x32, 0x31, - 0xe7, 0x54, 0xc1, 0x47, 0x50, 0x9a, 0x4b, 0x41, 0x16, 0x9b, 0x41, 0x6b, 0x33, 0x9a, 0x86, 0xf1, - 0x63, 0xb4, 0x1b, 0x83, 0x32, 0x3c, 0xe1, 0x31, 0x35, 0x5c, 0x8a, 0xf7, 0xe3, 0x1c, 0xc8, 0x52, - 0x33, 0x68, 0xad, 0x47, 0xb3, 0x04, 0x7e, 0x86, 0xf6, 0xa7, 0x1a, 0xf4, 0x8d, 0xe2, 0x62, 0x48, - 0x96, 0x5d, 0xc5, 0x7c, 0xd2, 0xaa, 0x89, 0x5f, 0x95, 0xd0, 0x49, 0x91, 0x0d, 0x40, 0x91, 0x15, - 0xaf, 0x66, 0x0a, 0xc6, 0x27, 0xe8, 0x68, 0xaa, 0x45, 0x77, 0x52, 0x43, 0xdf, 0x50, 0x53, 0x68, - 0xb2, 0xea, 0x8a, 0x6f, 0x91, 0x89, 0x31, 0x5a, 0x62, 0xd4, 0x00, 0x59, 0x73, 0xf2, 0xdc, 0x19, - 0x1f, 0xa0, 0x15, 0x05, 0x54, 0x4b, 0x41, 0xd6, 0x1d, 0x5a, 0x46, 0x38, 0x44, 0xcb, 0xf2, 0x5c, - 0x80, 0x22, 0xc8, 0xc2, 0xc7, 0xe4, 0xd7, 0xf7, 0xf6, 0x5e, 0xb9, 0x8b, 0x97, 0x8c, 0x29, 0xd0, - 0xda, 0x5f, 0x27, 0xf2, 0x69, 0x38, 0x41, 0xab, 0xe5, 0x2e, 0xc9, 0x46, 0x73, 0xb1, 0xb5, 0xf1, - 0xe4, 0x6d, 0xf8, 0x5f, 0x46, 0x09, 0xeb, 0xfd, 0xbe, 0xf6, 0xfd, 0x7a, 0x06, 0xb2, 0xa8, 0x6a, - 0xee, 0x5f, 0xaf, 0xbe, 0x1c, 0xf4, 0x18, 0xb9, 0xeb, 0x84, 0x4f, 0xc3, 0x38, 0x44, 0xf8, 0xc6, - 0xca, 0x22, 0x59, 0x18, 0x20, 0x9b, 0x2e, 0x79, 0x0e, 0x83, 0x9b, 0x68, 0x23, 0x57, 0x72, 0xa8, - 0x68, 0xe6, 0xb6, 0xbe, 0xe5, 0x12, 0x27, 0x21, 0xdb, 0x71, 0x22, 0xac, 0xac, 0xb4, 0xed, 0x3b, - 0xce, 0x32, 0xd6, 0x1f, 0xd5, 0xc5, 0xcc, 0xbb, 0x94, 0x9a, 0x44, 0xaa, 0xec, 0x83, 0x06, 0x46, - 0x76, 0xbc, 0x3f, 0xe6, 0x92, 0xf8, 0x05, 0x22, 0x33, 0x44, 0x35, 0x6b, 0xd7, 0x15, 0xfe, 0x93, - 0xc7, 0x0f, 0xd1, 0xba, 0x51, 0x54, 0xe8, 0x5c, 0x2a, 0x43, 0xb0, 0x4b, 0xae, 
0x01, 0x7c, 0x88, - 0xd6, 0x12, 0x9a, 0xf1, 0x74, 0xdc, 0x63, 0xe4, 0x9e, 0x23, 0xaf, 0x63, 0xeb, 0x7c, 0x5d, 0xe4, - 0x36, 0x0d, 0x58, 0x37, 0x2d, 0xb4, 0x01, 0xa5, 0xc9, 0x9e, 0x77, 0xfe, 0x0c, 0x61, 0xe7, 0x9c, - 0xf6, 0x2b, 0x51, 0xfb, 0x7e, 0xce, 0x35, 0xe0, 0x5e, 0x92, 0x2a, 0x10, 0xa6, 0x3b, 0xe2, 0x29, - 0x23, 0x07, 0xe5, 0x4b, 0xd6, 0x90, 0x75, 0xf6, 0x8d, 0x0d, 0xf4, 0xd8, 0x69, 0xd2, 0x2f, 0x0d, - 0x6c, 0x0d, 0x20, 0x05, 0x08, 0x43, 0xee, 0xbb, 0xc2, 0x5b, 0x64, 0xe2, 0x47, 0x68, 0x53, 0xc7, - 0x23, 0xc8, 0x68, 0xa5, 0x89, 0xb8, 0x8f, 0xe2, 0x26, 0x78, 0x0c, 0x3f, 0x2f, 0x1b, 0xc1, 0xc5, - 0x65, 0x23, 0xf8, 0x73, 0xd9, 0x08, 0xbe, 0x5d, 0x35, 0x16, 0x2e, 0xae, 0x1a, 0x0b, 0xbf, 0xaf, - 0x1a, 0x0b, 0x9f, 0xde, 0x0c, 0xb9, 0x19, 0x15, 0x03, 0xeb, 0xc3, 0x8e, 0xb7, 0x6d, 0xbb, 0xf2, - 0x6d, 0x67, 0xc2, 0xb7, 0xed, 0xda, 0xa7, 0x6d, 0xef, 0xdc, 0xce, 0x97, 0x89, 0xbf, 0x59, 0xc7, - 0x8c, 0x73, 0xd0, 0x83, 0x15, 0xf7, 0xb3, 0x7a, 0xfa, 0x37, 0x00, 0x00, 0xff, 0xff, 0xda, 0xe4, - 0x3c, 0xef, 0x4c, 0x05, 0x00, 0x00, +var fileDescriptor_b59222752039640e = []byte{ + // 619 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x54, 0xcb, 0x6e, 0x13, 0x3d, + 0x14, 0xee, 0xfc, 0xbd, 0xbb, 0x7f, 0x6f, 0xa6, 0x2d, 0xa6, 0x42, 0x51, 0x54, 0xb1, 0xc8, 0x82, + 0x4c, 0x24, 0x2e, 0x1b, 0x76, 0x34, 0x2c, 0x88, 0x8a, 0x5a, 0x94, 0x00, 0x0b, 0x36, 0x95, 0x33, + 0x3e, 0x93, 0x58, 0x9a, 0xb1, 0x47, 0xb6, 0x87, 0x12, 0x9e, 0x82, 0x87, 0x61, 0xc9, 0x03, 0xb0, + 0xac, 0x58, 0xb1, 0x44, 0xed, 0x8b, 0x20, 0xdb, 0x33, 0x9d, 0xdc, 0x40, 0x15, 0xec, 0xe6, 0x7c, + 0xdf, 0x39, 0xc7, 0x9f, 0x7d, 0xbe, 0x39, 0xe8, 0xe4, 0x13, 0x1f, 0xf4, 0x01, 0x68, 0x92, 0x70, + 0x2a, 0x22, 0x68, 0x31, 0xae, 0x8d, 0xe2, 0xfd, 0xdc, 0x00, 0x8b, 0x64, 0x9a, 0x79, 0x34, 0x01, + 0x36, 0x00, 0xd5, 0xaa, 0x80, 0xb1, 0xcf, 0x73, 0x2e, 0x62, 0x19, 0x66, 0x4a, 0x1a, 0x89, 0x9f, + 0x4e, 0x36, 0x0b, 0xff, 0xd0, 0x2c, 0xac, 0x80, 0xc3, 0xde, 0x3f, 0x6b, 0x18, 
0x72, 0x6d, 0xa4, + 0x1a, 0x9d, 0x73, 0x03, 0xa9, 0xd7, 0x72, 0x78, 0x2f, 0x92, 0x3a, 0x95, 0xfa, 0xdc, 0x45, 0x2d, + 0x1f, 0x78, 0xea, 0xe8, 0xeb, 0x1a, 0xda, 0x6a, 0xdf, 0x14, 0x77, 0x44, 0x2c, 0xf1, 0x0e, 0x5a, + 0xfc, 0xc0, 0x19, 0x09, 0xea, 0x41, 0x63, 0xb9, 0x6b, 0x3f, 0x2d, 0x92, 0x71, 0x46, 0xfe, 0xf3, + 0x48, 0xc6, 0x19, 0x6e, 0xa0, 0x6d, 0x2d, 0x63, 0x73, 0x41, 0x15, 0xbc, 0x03, 0xa5, 0xb9, 0x14, + 0x64, 0xb1, 0x1e, 0x34, 0x36, 0xbb, 0xd3, 0x30, 0x7e, 0x88, 0x76, 0x23, 0x50, 0x86, 0xc7, 0x3c, + 0xa2, 0x86, 0x4b, 0xf1, 0x66, 0x94, 0x01, 0x59, 0xaa, 0x07, 0x8d, 0xf5, 0xee, 0x2c, 0x81, 0x9f, + 0xa0, 0xfd, 0xa9, 0x06, 0x3d, 0xa3, 0xb8, 0x18, 0x90, 0x65, 0x57, 0x31, 0x9f, 0xb4, 0x6a, 0xa2, + 0x17, 0x05, 0x74, 0x9a, 0xa7, 0x7d, 0x50, 0x64, 0xc5, 0xab, 0x99, 0x82, 0xf1, 0x29, 0x3a, 0x9a, + 0x6a, 0xd1, 0x1e, 0xd7, 0xd0, 0x33, 0xd4, 0xe4, 0x9a, 0xac, 0xba, 0xe2, 0x5b, 0x64, 0x62, 0x8c, + 0x96, 0x18, 0x35, 0x40, 0xd6, 0x9c, 0x3c, 0xf7, 0x8d, 0x0f, 0xd0, 0x8a, 0x02, 0xaa, 0xa5, 0x20, + 0xeb, 0x0e, 0x2d, 0x22, 0x1c, 0xa2, 0x65, 0x79, 0x21, 0x40, 0x11, 0x64, 0xe1, 0x63, 0xf2, 0xfd, + 0x4b, 0x73, 0xaf, 0x98, 0xc5, 0x73, 0xc6, 0x14, 0x68, 0xed, 0xaf, 0xd3, 0xf5, 0x69, 0x38, 0x46, + 0xab, 0xc5, 0x2c, 0xc9, 0x46, 0x7d, 0xb1, 0xb1, 0xf1, 0xe8, 0x55, 0xf8, 0x57, 0x9e, 0x0a, 0xab, + 0xf9, 0xbe, 0xf4, 0xfd, 0x3a, 0x06, 0xd2, 0x6e, 0xd9, 0xdc, 0xbf, 0x5e, 0x75, 0x39, 0xe8, 0x30, + 0xf2, 0xbf, 0x13, 0x3e, 0x0d, 0xe3, 0x10, 0xe1, 0x89, 0x91, 0x75, 0x65, 0x6e, 0x80, 0x6c, 0xba, + 0xe4, 0x39, 0x0c, 0xae, 0xa3, 0x8d, 0x4c, 0xc9, 0x81, 0xa2, 0xa9, 0x9b, 0xfa, 0x96, 0x4b, 0x1c, + 0x87, 0x6c, 0xc7, 0xb1, 0xb0, 0xb4, 0xd2, 0xb6, 0xef, 0x38, 0xcb, 0x58, 0x7f, 0x94, 0x17, 0x33, + 0xaf, 0x13, 0x6a, 0x62, 0xa9, 0xd2, 0xb7, 0x1a, 0x18, 0xd9, 0xf1, 0xfe, 0x98, 0x4b, 0xe2, 0x67, + 0x88, 0xcc, 0x10, 0xe5, 0x59, 0xbb, 0xae, 0xf0, 0xb7, 0x3c, 0xbe, 0x8f, 0xd6, 0x8d, 0xa2, 0x42, + 0x67, 0x52, 0x19, 0x82, 0x5d, 0x72, 0x05, 0xe0, 0x43, 0xb4, 0x16, 0xd3, 0x94, 0x27, 0xa3, 0x0e, + 0x23, 0x77, 0x1c, 
0x79, 0x13, 0x5b, 0xe7, 0xeb, 0x3c, 0xb3, 0x69, 0xc0, 0xda, 0x49, 0xae, 0x0d, + 0x28, 0x4d, 0xf6, 0xbc, 0xf3, 0x67, 0x08, 0x7b, 0xce, 0x59, 0xaf, 0x14, 0xb5, 0xef, 0xcf, 0xb9, + 0x01, 0xdc, 0x4b, 0x52, 0x05, 0xc2, 0xb4, 0x87, 0x3c, 0x61, 0xe4, 0xa0, 0x78, 0xc9, 0x0a, 0xb2, + 0xce, 0x9e, 0x98, 0x40, 0x87, 0x9d, 0xc5, 0xbd, 0xc2, 0xc0, 0xd6, 0x00, 0x52, 0x80, 0x30, 0xe4, + 0xae, 0x2b, 0xbc, 0x45, 0x26, 0x7e, 0x80, 0x36, 0x75, 0x34, 0x84, 0x94, 0x96, 0x9a, 0x88, 0xfb, + 0x29, 0x26, 0xc1, 0x63, 0xf8, 0x76, 0x55, 0x0b, 0x2e, 0xaf, 0x6a, 0xc1, 0xcf, 0xab, 0x5a, 0xf0, + 0xf9, 0xba, 0xb6, 0x70, 0x79, 0x5d, 0x5b, 0xf8, 0x71, 0x5d, 0x5b, 0x78, 0x7f, 0x32, 0xe0, 0x66, + 0x98, 0xf7, 0xad, 0x0f, 0x5b, 0xde, 0xb6, 0xcd, 0x79, 0x4b, 0xad, 0x59, 0xf9, 0xb4, 0x59, 0xac, + 0xb5, 0x8f, 0xe3, 0x8b, 0xcd, 0x8c, 0x32, 0xd0, 0xfd, 0x15, 0xb7, 0xac, 0x1e, 0xff, 0x0a, 0x00, + 0x00, 0xff, 0xff, 0x28, 0xb4, 0x1e, 0x70, 0xa2, 0x05, 0x00, 0x00, } func (m *ComplianceInfo) Marshal() (dAtA []byte, err error) { diff --git a/x/compliance/types/device_software_compliance.pb.go b/x/compliance/types/device_software_compliance.pb.go index c58f7f66a..794ba444e 100644 --- a/x/compliance/types/device_software_compliance.pb.go +++ b/x/compliance/types/device_software_compliance.pb.go @@ -1,16 +1,14 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: compliance/device_software_compliance.proto +// source: zigbeealliance/distributedcomplianceledger/compliance/device_software_compliance.proto package types import ( fmt "fmt" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" - - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" ) // Reference imports to suppress errors if they are not otherwise used. 
@@ -25,15 +23,15 @@ var _ = math.Inf const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package type DeviceSoftwareCompliance struct { - CDCertificateId string `protobuf:"bytes,1,opt,name=cDCertificateId,proto3" json:"cDCertificateId,omitempty"` - ComplianceInfo []*dclcompltypes.ComplianceInfo `protobuf:"bytes,2,rep,name=complianceInfo,proto3" json:"complianceInfo,omitempty"` + CDCertificateId string `protobuf:"bytes,1,opt,name=cDCertificateId,proto3" json:"cDCertificateId,omitempty"` + ComplianceInfo []*ComplianceInfo `protobuf:"bytes,2,rep,name=complianceInfo,proto3" json:"complianceInfo,omitempty"` } func (m *DeviceSoftwareCompliance) Reset() { *m = DeviceSoftwareCompliance{} } func (m *DeviceSoftwareCompliance) String() string { return proto.CompactTextString(m) } func (*DeviceSoftwareCompliance) ProtoMessage() {} func (*DeviceSoftwareCompliance) Descriptor() ([]byte, []int) { - return fileDescriptor_22c66eeddd1d6355, []int{0} + return fileDescriptor_2c74e1bdb764c708, []int{0} } func (m *DeviceSoftwareCompliance) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -69,7 +67,7 @@ func (m *DeviceSoftwareCompliance) GetCDCertificateId() string { return "" } -func (m *DeviceSoftwareCompliance) GetComplianceInfo() []*dclcompltypes.ComplianceInfo { +func (m *DeviceSoftwareCompliance) GetComplianceInfo() []*ComplianceInfo { if m != nil { return m.ComplianceInfo } @@ -81,27 +79,27 @@ func init() { } func init() { - proto.RegisterFile("compliance/device_software_compliance.proto", fileDescriptor_22c66eeddd1d6355) + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/compliance/device_software_compliance.proto", fileDescriptor_2c74e1bdb764c708) } -var fileDescriptor_22c66eeddd1d6355 = []byte{ - // 249 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xd2, 0x4e, 0xce, 0xcf, 0x2d, - 0xc8, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0x49, 0x2d, 0xcb, 0x4c, 0x4e, 0x8d, 0x2f, 0xce, - 
0x4f, 0x2b, 0x29, 0x4f, 0x2c, 0x4a, 0x8d, 0x47, 0x48, 0xe9, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, - 0x99, 0x56, 0x65, 0xa6, 0x27, 0xa5, 0xa6, 0x26, 0xe6, 0x40, 0x45, 0x53, 0x32, 0x8b, 0x4b, 0x8a, - 0x32, 0x93, 0x4a, 0x4b, 0x52, 0x53, 0x10, 0x6a, 0x73, 0x52, 0x53, 0xd2, 0x53, 0x8b, 0xf4, 0x10, - 0x02, 0x52, 0x0a, 0x48, 0x76, 0x20, 0x98, 0xf1, 0x99, 0x79, 0x69, 0xf9, 0x10, 0x83, 0x95, 0x36, - 0x33, 0x72, 0x49, 0xb8, 0x80, 0x6d, 0x0f, 0x86, 0x5a, 0xee, 0x0c, 0x57, 0x27, 0xa4, 0xc1, 0xc5, - 0x9f, 0xec, 0xe2, 0x9c, 0x5a, 0x54, 0x92, 0x99, 0x96, 0x99, 0x9c, 0x58, 0x92, 0xea, 0x99, 0x22, - 0xc1, 0xa8, 0xc0, 0xa8, 0xc1, 0x19, 0x84, 0x2e, 0x2c, 0x94, 0xcb, 0xc5, 0x87, 0x30, 0xdf, 0x33, - 0x2f, 0x2d, 0x5f, 0x82, 0x49, 0x81, 0x59, 0x83, 0xdb, 0xc8, 0x55, 0x8f, 0x2c, 0x87, 0xeb, 0x39, - 0xa3, 0x18, 0x16, 0x84, 0x66, 0xb8, 0x53, 0xea, 0x89, 0x47, 0x72, 0x8c, 0x17, 0x1e, 0xc9, 0x31, - 0x3e, 0x78, 0x24, 0xc7, 0x38, 0xe1, 0xb1, 0x1c, 0xc3, 0x85, 0xc7, 0x72, 0x0c, 0x37, 0x1e, 0xcb, - 0x31, 0x44, 0x79, 0xa7, 0x67, 0x96, 0x64, 0x94, 0x26, 0x81, 0xcc, 0xd2, 0x87, 0x58, 0xad, 0x0b, - 0xb3, 0x5b, 0x1f, 0xc9, 0x6e, 0x5d, 0x84, 0x81, 0xba, 0x10, 0xdb, 0xf5, 0x2b, 0x90, 0x42, 0x48, - 0xbf, 0xa4, 0xb2, 0x20, 0xb5, 0x38, 0x89, 0x0d, 0x1c, 0x46, 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, - 0xff, 0x1d, 0x6f, 0xd6, 0x9c, 0xab, 0x01, 0x00, 0x00, +var fileDescriptor_2c74e1bdb764c708 = []byte{ + // 252 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x0a, 0xab, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0x23, 0x04, 0xf4, 0x53, 0x52, 0xcb, 0x32, 0x93, 0x53, 0xe3, 0x8b, 0xf3, 0xd3, + 0x4a, 0xca, 0x13, 0x8b, 0x52, 0xe3, 0x11, 0x52, 0x7a, 0x05, 0x45, 0xf9, 0x25, 0xf9, 0x42, 0xa6, + 0xa8, 0xe6, 0xea, 0xe1, 0x31, 0x57, 0x0f, 0x21, 0x20, 0xe5, 0x4d, 0x9e, 0x73, 0x10, 
0xcc, 0xf8, + 0xcc, 0xbc, 0xb4, 0x7c, 0x88, 0x1b, 0x94, 0x36, 0x33, 0x72, 0x49, 0xb8, 0x80, 0x1d, 0x1a, 0x0c, + 0x75, 0xa7, 0x33, 0x5c, 0x9d, 0x90, 0x06, 0x17, 0x7f, 0xb2, 0x8b, 0x73, 0x6a, 0x51, 0x49, 0x66, + 0x5a, 0x66, 0x72, 0x62, 0x49, 0xaa, 0x67, 0x8a, 0x04, 0xa3, 0x02, 0xa3, 0x06, 0x67, 0x10, 0xba, + 0xb0, 0x50, 0x2e, 0x17, 0x1f, 0xc2, 0x7c, 0xcf, 0xbc, 0xb4, 0x7c, 0x09, 0x26, 0x05, 0x66, 0x0d, + 0x6e, 0x23, 0x57, 0x3d, 0xb2, 0xfc, 0xa8, 0xe7, 0x8c, 0x62, 0x58, 0x10, 0x9a, 0xe1, 0x4e, 0xa9, + 0x27, 0x1e, 0xc9, 0x31, 0x5e, 0x78, 0x24, 0xc7, 0xf8, 0xe0, 0x91, 0x1c, 0xe3, 0x84, 0xc7, 0x72, + 0x0c, 0x17, 0x1e, 0xcb, 0x31, 0xdc, 0x78, 0x2c, 0xc7, 0x10, 0xe5, 0x9d, 0x9e, 0x59, 0x92, 0x51, + 0x9a, 0x04, 0x32, 0x4b, 0x1f, 0x62, 0xb5, 0x2e, 0xb6, 0x80, 0xd2, 0x45, 0x18, 0xa8, 0x0b, 0x0d, + 0xaa, 0x0a, 0xe4, 0xc0, 0x2a, 0xa9, 0x2c, 0x48, 0x2d, 0x4e, 0x62, 0x03, 0x87, 0x91, 0x31, 0x20, + 0x00, 0x00, 0xff, 0xff, 0x51, 0x47, 0xc3, 0xa7, 0x01, 0x02, 0x00, 0x00, } func (m *DeviceSoftwareCompliance) Marshal() (dAtA []byte, err error) { @@ -274,7 +272,7 @@ func (m *DeviceSoftwareCompliance) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.ComplianceInfo = append(m.ComplianceInfo, &dclcompltypes.ComplianceInfo{}) + m.ComplianceInfo = append(m.ComplianceInfo, &ComplianceInfo{}) if err := m.ComplianceInfo[len(m.ComplianceInfo)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } diff --git a/x/compliance/types/errors.go b/x/compliance/types/errors.go index 58647e0c2..c035b001c 100644 --- a/x/compliance/types/errors.go +++ b/x/compliance/types/errors.go @@ -3,26 +3,26 @@ package types // DONTCOVER import ( - sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "cosmossdk.io/errors" ) // x/compliance module sentinel errors. 
var ( - ErrComplianceInfoAlreadyExist = sdkerrors.Register(ModuleName, 301, "compliance info already exist") - ErrInconsistentDates = sdkerrors.Register(ModuleName, 302, "inconsistent dates") - ErrAlreadyCertified = sdkerrors.Register(ModuleName, 303, "model already certified") - ErrAlreadyRevoked = sdkerrors.Register(ModuleName, 304, "model already revoked") - ErrAlreadyProvisional = sdkerrors.Register(ModuleName, 305, "model already in provisional state") - ErrModelVersionStringDoesNotMatch = sdkerrors.Register(ModuleName, 306, "model version does not match") - ErrInvalidTestDateFormat = sdkerrors.Register(ModuleName, 307, "test date must be in RFC3339 format") - ErrInvalidCertificationType = sdkerrors.Register(ModuleName, 308, "invalid certification type") - ErrInvalidPFCCertificationRoute = sdkerrors.Register(ModuleName, 309, "invalid PFC certification route") - ErrComplianceInfoDoesNotExist = sdkerrors.Register(ModuleName, 310, "compliance info not found") - ErrInvalidUint32ForCdVersionNumber = sdkerrors.Register(ModuleName, 311, "invalid uint32 for cd version number") + ErrComplianceInfoAlreadyExist = errors.Register(ModuleName, 301, "compliance info already exist") + ErrInconsistentDates = errors.Register(ModuleName, 302, "inconsistent dates") + ErrAlreadyCertified = errors.Register(ModuleName, 303, "model already certified") + ErrAlreadyRevoked = errors.Register(ModuleName, 304, "model already revoked") + ErrAlreadyProvisional = errors.Register(ModuleName, 305, "model already in provisional state") + ErrModelVersionStringDoesNotMatch = errors.Register(ModuleName, 306, "model version does not match") + ErrInvalidTestDateFormat = errors.Register(ModuleName, 307, "test date must be in RFC3339 format") + ErrInvalidCertificationType = errors.Register(ModuleName, 308, "invalid certification type") + ErrInvalidPFCCertificationRoute = errors.Register(ModuleName, 309, "invalid PFC certification route") + ErrComplianceInfoDoesNotExist = errors.Register(ModuleName, 
310, "compliance info not found") + ErrInvalidUint32ForCdVersionNumber = errors.Register(ModuleName, 311, "invalid uint32 for cd version number") ) func NewErrInconsistentDates(err interface{}) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrInconsistentDates, "%v", err, @@ -30,7 +30,7 @@ func NewErrInconsistentDates(err interface{}) error { } func NewErrAlreadyCertified(vid interface{}, pid interface{}, sv interface{}, certificationType interface{}) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrAlreadyCertified, "Model with vid=%v, pid=%v, softwareVersion=%v, certificationType=%v already certified on the ledger", vid, pid, sv, certificationType, @@ -38,7 +38,7 @@ func NewErrAlreadyCertified(vid interface{}, pid interface{}, sv interface{}, ce } func NewErrAlreadyRevoked(vid interface{}, pid interface{}, sv interface{}, certificationType interface{}) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrAlreadyRevoked, "Model with vid=%v, pid=%v, softwareVersion=%v, certificationType=%v already revoked on the ledger", vid, pid, sv, certificationType, @@ -46,7 +46,7 @@ func NewErrAlreadyRevoked(vid interface{}, pid interface{}, sv interface{}, cert } func NewErrAlreadyProvisional(vid interface{}, pid interface{}, sv interface{}, certificationType interface{}) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrAlreadyProvisional, "Model with vid=%v, pid=%v, softwareVersion=%v, certificationType=%v is already in provisional state on the ledger", vid, pid, sv, certificationType, @@ -54,7 +54,7 @@ func NewErrAlreadyProvisional(vid interface{}, pid interface{}, sv interface{}, } func NewErrComplianceInfoAlreadyExist(vid interface{}, pid interface{}, sv interface{}, certificationType interface{}) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrComplianceInfoAlreadyExist, "Model with vid=%v, pid=%v, softwareVersion=%v, certificationType=%v already has compliance info on the ledger", vid, pid, sv, certificationType, @@ 
-62,7 +62,7 @@ func NewErrComplianceInfoAlreadyExist(vid interface{}, pid interface{}, sv inter } func NewErrComplianceInfoDoesNotExist(vid interface{}, pid interface{}, sv interface{}, certificationType interface{}) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrComplianceInfoDoesNotExist, "Model with vid=%v, pid=%v, softwareVersion=%v, certificationType=%v has no compliance info on the ledger", vid, pid, sv, certificationType, @@ -70,7 +70,7 @@ func NewErrComplianceInfoDoesNotExist(vid interface{}, pid interface{}, sv inter } func NewErrInvalidUint32ForCdVersionNumber(vid interface{}, pid interface{}, sv interface{}, certificationType interface{}, cdVersionNumber interface{}) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrInvalidUint32ForCdVersionNumber, "Compliance info with vid=%v, pid=%v, softwareVersion=%v, certificationType=%v cannot be updated with an invalid uint32 cd version number %v", vid, pid, sv, certificationType, cdVersionNumber, @@ -80,7 +80,7 @@ func NewErrInvalidUint32ForCdVersionNumber(vid interface{}, pid interface{}, sv func NewErrModelVersionStringDoesNotMatch(vid interface{}, pid interface{}, softwareVersion interface{}, softwareVersionString interface{}, ) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrModelVersionStringDoesNotMatch, "Model with vid=%v, pid=%v, softwareVersion=%v present on the ledger does not have"+ " matching softwareVersionString=%v", @@ -91,7 +91,7 @@ func NewErrModelVersionStringDoesNotMatch(vid interface{}, pid interface{}, func NewErrModelVersionCDVersionNumberDoesNotMatch(vid interface{}, pid interface{}, softwareVersion interface{}, cDVersionNumber interface{}, ) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrModelVersionStringDoesNotMatch, "Model with vid=%v, pid=%v, softwareVersion=%v present on the ledger does not have"+ " matching CDVersionNumber=%v", @@ -100,21 +100,21 @@ func NewErrModelVersionCDVersionNumberDoesNotMatch(vid interface{}, pid interfac } func 
NewErrInvalidTestDateFormat(testDate interface{}) error { - return sdkerrors.Wrapf(ErrInvalidTestDateFormat, + return errors.Wrapf(ErrInvalidTestDateFormat, "Invalid TestDate \"%v\": it must be RFC3339 encoded date, for example 2019-10-12T07:20:50.52Z", testDate, ) } func NewErrInvalidCertificationType(certType interface{}, certList interface{}) error { - return sdkerrors.Wrapf(ErrInvalidCertificationType, + return errors.Wrapf(ErrInvalidCertificationType, "Invalid CertificationType: \"%s\". Supported types: [%s]", certType, certList, ) } func NewErrInvalidPFCCertificationRoute(certRoute interface{}, certList interface{}) error { - return sdkerrors.Wrapf(ErrInvalidPFCCertificationRoute, + return errors.Wrapf(ErrInvalidPFCCertificationRoute, "Invalid PFCCertificationRoute: \"%s\". Supported types: [%s]", certRoute, certList, ) diff --git a/x/compliance/types/genesis.go b/x/compliance/types/genesis.go index a97b3b947..3ce66c085 100644 --- a/x/compliance/types/genesis.go +++ b/x/compliance/types/genesis.go @@ -2,8 +2,6 @@ package types import ( "fmt" - - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" ) // DefaultIndex is the default capability global index. @@ -12,7 +10,7 @@ const DefaultIndex uint64 = 1 // DefaultGenesis returns the default Capability genesis state. func DefaultGenesis() *GenesisState { return &GenesisState{ - ComplianceInfoList: []dclcompltypes.ComplianceInfo{}, + ComplianceInfoList: []ComplianceInfo{}, CertifiedModelList: []CertifiedModel{}, RevokedModelList: []RevokedModel{}, ProvisionalModelList: []ProvisionalModel{}, diff --git a/x/compliance/types/genesis.pb.go b/x/compliance/types/genesis.pb.go index 87fd77e37..8b94a7451 100644 --- a/x/compliance/types/genesis.pb.go +++ b/x/compliance/types/genesis.pb.go @@ -1,17 +1,15 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: compliance/genesis.proto +// source: zigbeealliance/distributedcomplianceledger/compliance/genesis.proto package types import ( fmt "fmt" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" - - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" ) // Reference imports to suppress errors if they are not otherwise used. @@ -27,18 +25,18 @@ const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package // GenesisState defines the compliance module's genesis state. type GenesisState struct { - ComplianceInfoList []dclcompltypes.ComplianceInfo `protobuf:"bytes,1,rep,name=complianceInfoList,proto3" json:"complianceInfoList"` - CertifiedModelList []CertifiedModel `protobuf:"bytes,2,rep,name=certifiedModelList,proto3" json:"certifiedModelList"` - RevokedModelList []RevokedModel `protobuf:"bytes,3,rep,name=revokedModelList,proto3" json:"revokedModelList"` - ProvisionalModelList []ProvisionalModel `protobuf:"bytes,4,rep,name=provisionalModelList,proto3" json:"provisionalModelList"` - DeviceSoftwareComplianceList []DeviceSoftwareCompliance `protobuf:"bytes,5,rep,name=deviceSoftwareComplianceList,proto3" json:"deviceSoftwareComplianceList"` + ComplianceInfoList []ComplianceInfo `protobuf:"bytes,1,rep,name=complianceInfoList,proto3" json:"complianceInfoList"` + CertifiedModelList []CertifiedModel `protobuf:"bytes,2,rep,name=certifiedModelList,proto3" json:"certifiedModelList"` + RevokedModelList []RevokedModel `protobuf:"bytes,3,rep,name=revokedModelList,proto3" json:"revokedModelList"` + ProvisionalModelList []ProvisionalModel `protobuf:"bytes,4,rep,name=provisionalModelList,proto3" json:"provisionalModelList"` + DeviceSoftwareComplianceList []DeviceSoftwareCompliance `protobuf:"bytes,5,rep,name=deviceSoftwareComplianceList,proto3" 
json:"deviceSoftwareComplianceList"` } func (m *GenesisState) Reset() { *m = GenesisState{} } func (m *GenesisState) String() string { return proto.CompactTextString(m) } func (*GenesisState) ProtoMessage() {} func (*GenesisState) Descriptor() ([]byte, []int) { - return fileDescriptor_a491f9d8ec824f82, []int{0} + return fileDescriptor_8cf32961a707b14c, []int{0} } func (m *GenesisState) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -67,7 +65,7 @@ func (m *GenesisState) XXX_DiscardUnknown() { var xxx_messageInfo_GenesisState proto.InternalMessageInfo -func (m *GenesisState) GetComplianceInfoList() []dclcompltypes.ComplianceInfo { +func (m *GenesisState) GetComplianceInfoList() []ComplianceInfo { if m != nil { return m.ComplianceInfoList } @@ -106,35 +104,37 @@ func init() { proto.RegisterType((*GenesisState)(nil), "zigbeealliance.distributedcomplianceledger.compliance.GenesisState") } -func init() { proto.RegisterFile("compliance/genesis.proto", fileDescriptor_a491f9d8ec824f82) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/compliance/genesis.proto", fileDescriptor_8cf32961a707b14c) +} -var fileDescriptor_a491f9d8ec824f82 = []byte{ - // 391 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x93, 0xcf, 0x4e, 0xf2, 0x40, - 0x14, 0xc5, 0xdb, 0x0f, 0xf8, 0x16, 0xd5, 0x85, 0x69, 0x58, 0x10, 0x63, 0x2a, 0x61, 0x65, 0x62, - 0x68, 0x13, 0x8d, 0x2f, 0x00, 0x1a, 0x62, 0xd4, 0x68, 0x60, 0xe7, 0xa6, 0x69, 0xe9, 0x6d, 0x9d, - 0x58, 0x3a, 0x4d, 0x67, 0xc0, 0x7f, 0x2b, 0xdf, 0xc0, 0xad, 0x2b, 0x5f, 0x87, 0x25, 0x4b, 0x57, - 0xc6, 0xc0, 0x8b, 0x18, 0x66, 0x06, 0x67, 0x0a, 0xe8, 0xa2, 0xee, 0x6e, 0x7a, 0xcf, 0x9c, 0xdf, - 0xe9, 0xbd, 0x33, 0x46, 0xad, 0x8f, 0x07, 0x69, 0x8c, 0xbc, 0xa4, 0x0f, 0x4e, 0x04, 0x09, 0x10, - 0x44, 0xec, 0x34, 0xc3, 0x14, 0x9b, 0x47, 0x8f, 0x28, 0xf2, 0x01, 0xbc, 0x98, 0x77, 0xed, 0x00, - 0x11, 0x9a, 0x21, 0x7f, 0x48, 0x21, 0x90, 0x67, 0x62, 0x08, 0x22, 
0xc8, 0x6c, 0xf9, 0x61, 0xbb, - 0xae, 0x18, 0xca, 0xd2, 0x45, 0x49, 0x88, 0xb9, 0x71, 0x5e, 0x01, 0x19, 0x45, 0x21, 0x82, 0xc0, - 0x1d, 0xe0, 0x00, 0x62, 0xa1, 0xb0, 0x14, 0x45, 0x06, 0x23, 0x7c, 0xbb, 0xd4, 0x6f, 0x28, 0xfd, - 0x34, 0xc3, 0x23, 0x44, 0x10, 0x4e, 0xbc, 0x38, 0xa7, 0xd9, 0x57, 0x34, 0x01, 0x8c, 0x50, 0x1f, - 0x5c, 0x82, 0x43, 0x7a, 0xe7, 0x65, 0xe0, 0xca, 0x96, 0x10, 0x57, 0x23, 0x1c, 0x61, 0x56, 0x3a, - 0xf3, 0x8a, 0x7f, 0x6d, 0xbc, 0x55, 0x8c, 0xcd, 0x0e, 0x9f, 0x49, 0x8f, 0x7a, 0x14, 0xcc, 0x27, - 0xc3, 0x94, 0x47, 0x4f, 0x93, 0x10, 0x9f, 0x23, 0x42, 0x6b, 0x7a, 0xbd, 0xb4, 0xb7, 0x71, 0x70, - 0x62, 0x17, 0x9a, 0x97, 0xdd, 0xce, 0x19, 0xb6, 0xca, 0xe3, 0x8f, 0x5d, 0xad, 0xbb, 0x06, 0xc3, - 0xe0, 0x8b, 0x69, 0x5d, 0xcc, 0x7f, 0x94, 0xc1, 0xff, 0xfd, 0x0d, 0x9e, 0x33, 0xfc, 0x86, 0xaf, - 0x60, 0xcc, 0xa1, 0xb1, 0x25, 0x16, 0x21, 0xd1, 0x25, 0x86, 0x6e, 0x17, 0x44, 0x77, 0x15, 0x3b, - 0x01, 0x5e, 0x41, 0x98, 0xcf, 0xba, 0x51, 0x55, 0x16, 0x2c, 0xd9, 0x65, 0xc6, 0xee, 0x14, 0x64, - 0x5f, 0x2d, 0x59, 0x0a, 0xfe, 0x5a, 0x94, 0xf9, 0xaa, 0x1b, 0x3b, 0xfc, 0x02, 0xf5, 0xc4, 0xfd, - 0x91, 0x2b, 0x63, 0x59, 0x2a, 0x2c, 0xcb, 0x65, 0xc1, 0x2c, 0xc7, 0x3f, 0x58, 0x8b, 0x4c, 0xbf, - 0xa2, 0x5b, 0x30, 0x9e, 0x5a, 0xfa, 0x64, 0x6a, 0xe9, 0x9f, 0x53, 0x4b, 0x7f, 0x99, 0x59, 0xda, - 0x64, 0x66, 0x69, 0xef, 0x33, 0x4b, 0xbb, 0x3e, 0x8b, 0x10, 0xbd, 0x19, 0xfa, 0x73, 0x92, 0xc3, - 0x83, 0x35, 0x17, 0xc9, 0x1c, 0x25, 0x59, 0x53, 0x26, 0x69, 0xf2, 0x6c, 0xce, 0xbd, 0xf2, 0x6c, - 0x1d, 0xfa, 0x90, 0x02, 0xf1, 0xff, 0xb3, 0xf7, 0x70, 0xf8, 0x15, 0x00, 0x00, 0xff, 0xff, 0x90, - 0xd8, 0x15, 0xf3, 0x2d, 0x04, 0x00, 0x00, +var fileDescriptor_8cf32961a707b14c = []byte{ + // 393 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x93, 0xcf, 0x4e, 0xfa, 0x40, + 0x10, 0xc7, 0xdb, 0x1f, 0xf0, 0x3b, 0x54, 0x0f, 0xa6, 0xe1, 0x60, 0x8c, 0xa9, 0xc6, 0x93, 0x17, + 0xda, 0x44, 0xe3, 0x0b, 0x80, 0x86, 0x10, 0x24, 0x1a, 0x48, 
0x3c, 0x78, 0x69, 0x5a, 0x3a, 0xad, + 0x1b, 0x4b, 0xb7, 0xd9, 0x5d, 0xf0, 0xdf, 0xc9, 0x37, 0xf0, 0xea, 0xc9, 0xd7, 0xe1, 0xc8, 0xd1, + 0x93, 0x31, 0xf0, 0x22, 0x86, 0xed, 0xea, 0x52, 0xa8, 0x1c, 0xd6, 0xdb, 0x64, 0x9a, 0x7e, 0x3e, + 0xdf, 0xec, 0xcc, 0x18, 0x8d, 0x47, 0x14, 0xf9, 0x00, 0x5e, 0x1c, 0x23, 0x2f, 0xe9, 0x83, 0x13, + 0x20, 0xca, 0x08, 0xf2, 0x87, 0x0c, 0x82, 0x3e, 0x1e, 0xa4, 0x59, 0x37, 0x86, 0x20, 0x02, 0xe2, + 0xc8, 0x86, 0x13, 0x41, 0x02, 0x14, 0x51, 0x3b, 0x25, 0x98, 0x61, 0xf3, 0x24, 0x0f, 0xb1, 0xd7, + 0x40, 0x6c, 0xd9, 0xd8, 0x69, 0xab, 0xb9, 0x65, 0xe9, 0xa2, 0x24, 0xc4, 0x59, 0x06, 0x65, 0x18, + 0x10, 0x86, 0x42, 0x04, 0x81, 0x3b, 0xc0, 0x01, 0xc4, 0x02, 0xd6, 0x52, 0x83, 0x11, 0x18, 0xe1, + 0xdb, 0x25, 0x54, 0x47, 0x0d, 0x95, 0x12, 0x3c, 0x42, 0x14, 0xe1, 0xc4, 0x8b, 0x73, 0xb8, 0x2b, + 0x35, 0x5c, 0x00, 0x23, 0xd4, 0x07, 0x97, 0xe2, 0x90, 0xdd, 0x79, 0x04, 0x5c, 0xf9, 0x49, 0x70, + 0xab, 0x11, 0x8e, 0x30, 0x2f, 0x9d, 0x79, 0x95, 0x75, 0x0f, 0xde, 0x2a, 0xc6, 0x66, 0x33, 0x1b, + 0x75, 0x8f, 0x79, 0x0c, 0xcc, 0x27, 0xc3, 0x94, 0xbf, 0xb6, 0x92, 0x10, 0x9f, 0x23, 0xca, 0xb6, + 0xf5, 0xfd, 0xd2, 0xe1, 0xc6, 0xd1, 0x99, 0xad, 0xb4, 0x06, 0x76, 0x23, 0x07, 0xac, 0x97, 0xc7, + 0x1f, 0x7b, 0x5a, 0xb7, 0x40, 0xc3, 0xe5, 0xdf, 0xe3, 0xea, 0xcc, 0xdf, 0x84, 0xcb, 0xff, 0xfd, + 0x4d, 0x9e, 0x03, 0xfe, 0xc8, 0x57, 0x34, 0xe6, 0xd0, 0xd8, 0x12, 0xe3, 0x95, 0xea, 0x12, 0x57, + 0x37, 0x14, 0xd5, 0xdd, 0x05, 0x9c, 0x10, 0xaf, 0x28, 0xcc, 0x67, 0xdd, 0xa8, 0x2e, 0xec, 0x82, + 0x74, 0x97, 0xb9, 0xbb, 0xa9, 0xe8, 0xbe, 0x5c, 0x42, 0x0a, 0x7f, 0xa1, 0xca, 0x7c, 0xd5, 0x8d, + 0xdd, 0x6c, 0x81, 0x7a, 0x62, 0x7f, 0xe4, 0xc8, 0x78, 0x96, 0x0a, 0xcf, 0x72, 0xa1, 0x98, 0xe5, + 0xf4, 0x17, 0xb4, 0xc8, 0xb4, 0x56, 0x5d, 0x87, 0xf1, 0xd4, 0xd2, 0x27, 0x53, 0x4b, 0xff, 0x9c, + 0x5a, 0xfa, 0xcb, 0xcc, 0xd2, 0x26, 0x33, 0x4b, 0x7b, 0x9f, 0x59, 0xda, 0x75, 0x3b, 0x42, 0xec, + 0x66, 0xe8, 0xcf, 0x4d, 0x4e, 0x16, 0xac, 0x56, 0x74, 0x35, 0x35, 0x99, 0xa4, 0x26, 0xee, 0xe6, + 
0x7e, 0xf1, 0x72, 0xd8, 0x43, 0x0a, 0xd4, 0xff, 0xcf, 0xef, 0xe1, 0xf8, 0x2b, 0x00, 0x00, 0xff, + 0xff, 0x4d, 0x88, 0xa3, 0x5b, 0x2f, 0x05, 0x00, 0x00, } func (m *GenesisState) Marshal() (dAtA []byte, err error) { @@ -344,7 +344,7 @@ func (m *GenesisState) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.ComplianceInfoList = append(m.ComplianceInfoList, dclcompltypes.ComplianceInfo{}) + m.ComplianceInfoList = append(m.ComplianceInfoList, ComplianceInfo{}) if err := m.ComplianceInfoList[len(m.ComplianceInfoList)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } diff --git a/x/compliance/types/message_certify_model.go b/x/compliance/types/message_certify_model.go index 3a7ce5401..163f15fe6 100644 --- a/x/compliance/types/message_certify_model.go +++ b/x/compliance/types/message_certify_model.go @@ -3,9 +3,9 @@ package types import ( "time" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" ) @@ -72,7 +72,7 @@ func (msg *MsgCertifyModel) GetSignBytes() []byte { func (msg *MsgCertifyModel) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) } err = validator.Validate(msg) @@ -85,12 +85,12 @@ func (msg *MsgCertifyModel) ValidateBasic() error { return NewErrInvalidTestDateFormat(msg.CertificationDate) } - if !dclcompltypes.IsValidCertificationType(msg.CertificationType) { - return NewErrInvalidCertificationType(msg.CertificationType, dclcompltypes.CertificationTypesList) + if !IsValidCertificationType(msg.CertificationType) { + return 
NewErrInvalidCertificationType(msg.CertificationType, CertificationTypesList) } - if !dclcompltypes.IsValidPFCCertificationRoute(msg.ParentChild) { - return NewErrInvalidPFCCertificationRoute(msg.ParentChild, dclcompltypes.PFCCertificationRouteList) + if !IsValidPFCCertificationRoute(msg.ParentChild) { + return NewErrInvalidPFCCertificationRoute(msg.ParentChild, PFCCertificationRouteList) } return nil diff --git a/x/compliance/types/message_certify_model_test.go b/x/compliance/types/message_certify_model_test.go index 678adfbf3..74288221c 100644 --- a/x/compliance/types/message_certify_model_test.go +++ b/x/compliance/types/message_certify_model_test.go @@ -4,9 +4,9 @@ import ( fmt "fmt" "testing" + tmrand "github.com/cometbft/cometbft/libs/rand" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/sample" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" diff --git a/x/compliance/types/message_delete_compliance_info.go b/x/compliance/types/message_delete_compliance_info.go index 85ab60751..daec2ec7d 100644 --- a/x/compliance/types/message_delete_compliance_info.go +++ b/x/compliance/types/message_delete_compliance_info.go @@ -1,9 +1,9 @@ package types import ( + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" ) @@ -47,7 +47,7 @@ func (msg *MsgDeleteComplianceInfo) GetSignBytes() []byte { func (msg *MsgDeleteComplianceInfo) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return 
sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) } err = validator.Validate(msg) @@ -55,8 +55,8 @@ func (msg *MsgDeleteComplianceInfo) ValidateBasic() error { return err } - if !dclcompltypes.IsValidCertificationType(msg.CertificationType) { - return NewErrInvalidCertificationType(msg.CertificationType, dclcompltypes.CertificationTypesList) + if !IsValidCertificationType(msg.CertificationType) { + return NewErrInvalidCertificationType(msg.CertificationType, CertificationTypesList) } return nil diff --git a/x/compliance/types/message_provision_model.go b/x/compliance/types/message_provision_model.go index 3c507f4a8..01ff71e84 100644 --- a/x/compliance/types/message_provision_model.go +++ b/x/compliance/types/message_provision_model.go @@ -3,9 +3,9 @@ package types import ( "time" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" ) @@ -72,7 +72,7 @@ func (msg *MsgProvisionModel) GetSignBytes() []byte { func (msg *MsgProvisionModel) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) } err = validator.Validate(msg) @@ -85,12 +85,12 @@ func (msg *MsgProvisionModel) ValidateBasic() error { return NewErrInvalidTestDateFormat(msg.ProvisionalDate) } - if !dclcompltypes.IsValidCertificationType(msg.CertificationType) { - return NewErrInvalidCertificationType(msg.CertificationType, dclcompltypes.CertificationTypesList) + if !IsValidCertificationType(msg.CertificationType) { + return 
NewErrInvalidCertificationType(msg.CertificationType, CertificationTypesList) } - if !dclcompltypes.IsValidPFCCertificationRoute(msg.ParentChild) { - return NewErrInvalidPFCCertificationRoute(msg.ParentChild, dclcompltypes.PFCCertificationRouteList) + if !IsValidPFCCertificationRoute(msg.ParentChild) { + return NewErrInvalidPFCCertificationRoute(msg.ParentChild, PFCCertificationRouteList) } return nil diff --git a/x/compliance/types/message_provision_model_test.go b/x/compliance/types/message_provision_model_test.go index 5d1021453..834fdc7ac 100644 --- a/x/compliance/types/message_provision_model_test.go +++ b/x/compliance/types/message_provision_model_test.go @@ -4,9 +4,9 @@ import ( fmt "fmt" "testing" + tmrand "github.com/cometbft/cometbft/libs/rand" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/sample" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" diff --git a/x/compliance/types/message_revoke_model.go b/x/compliance/types/message_revoke_model.go index 72a7b51b3..6d9661fa9 100644 --- a/x/compliance/types/message_revoke_model.go +++ b/x/compliance/types/message_revoke_model.go @@ -3,9 +3,9 @@ package types import ( "time" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" ) @@ -57,7 +57,7 @@ func (msg *MsgRevokeModel) GetSignBytes() []byte { func (msg *MsgRevokeModel) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", 
err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) } err = validator.Validate(msg) @@ -70,8 +70,8 @@ func (msg *MsgRevokeModel) ValidateBasic() error { return NewErrInvalidTestDateFormat(msg.RevocationDate) } - if !dclcompltypes.IsValidCertificationType(msg.CertificationType) { - return NewErrInvalidCertificationType(msg.CertificationType, dclcompltypes.CertificationTypesList) + if !IsValidCertificationType(msg.CertificationType) { + return NewErrInvalidCertificationType(msg.CertificationType, CertificationTypesList) } return nil diff --git a/x/compliance/types/message_revoke_model_test.go b/x/compliance/types/message_revoke_model_test.go index f15444738..4b6ef678d 100644 --- a/x/compliance/types/message_revoke_model_test.go +++ b/x/compliance/types/message_revoke_model_test.go @@ -4,9 +4,9 @@ import ( fmt "fmt" "testing" + tmrand "github.com/cometbft/cometbft/libs/rand" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/sample" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" diff --git a/x/compliance/types/message_update_compliance_info.go b/x/compliance/types/message_update_compliance_info.go index 21354c379..c43f40181 100644 --- a/x/compliance/types/message_update_compliance_info.go +++ b/x/compliance/types/message_update_compliance_info.go @@ -4,9 +4,10 @@ import ( "strconv" "time" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" + "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" ) @@ -72,7 +73,7 @@ func (msg *MsgUpdateComplianceInfo) ValidateBasic() 
error { _, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) } err = validator.Validate(msg) @@ -97,16 +98,16 @@ func (msg *MsgUpdateComplianceInfo) ValidateBasic() error { } if cdVersionNumber > 65535 { - return sdkerrors.Wrap(validator.ErrFieldUpperBoundViolated, "CDVersionNumber must not be greater than 65535: field upper bound violatedError") + return errors.Wrap(validator.ErrFieldUpperBoundViolated, "CDVersionNumber must not be greater than 65535: field upper bound violatedError") } } - if !dclcompltypes.IsValidCertificationType(msg.CertificationType) { - return NewErrInvalidCertificationType(msg.CertificationType, dclcompltypes.CertificationTypesList) + if !IsValidCertificationType(msg.CertificationType) { + return NewErrInvalidCertificationType(msg.CertificationType, CertificationTypesList) } - if !dclcompltypes.IsValidPFCCertificationRoute(msg.ParentChild) { - return NewErrInvalidPFCCertificationRoute(msg.ParentChild, dclcompltypes.PFCCertificationRouteList) + if !IsValidPFCCertificationRoute(msg.ParentChild) { + return NewErrInvalidPFCCertificationRoute(msg.ParentChild, PFCCertificationRouteList) } return nil diff --git a/x/compliance/types/message_update_compliance_info_test.go b/x/compliance/types/message_update_compliance_info_test.go index 344180758..e9868e494 100644 --- a/x/compliance/types/message_update_compliance_info_test.go +++ b/x/compliance/types/message_update_compliance_info_test.go @@ -3,9 +3,9 @@ package types import ( "testing" + tmrand "github.com/cometbft/cometbft/libs/rand" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" 
"github.com/zigbee-alliance/distributed-compliance-ledger/testutil/sample" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" diff --git a/x/compliance/types/provisional_model.pb.go b/x/compliance/types/provisional_model.pb.go index 95908c6c2..d92e59e6f 100644 --- a/x/compliance/types/provisional_model.pb.go +++ b/x/compliance/types/provisional_model.pb.go @@ -1,11 +1,11 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: compliance/provisional_model.proto +// source: zigbeealliance/distributedcomplianceledger/compliance/provisional_model.proto package types import ( fmt "fmt" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -34,7 +34,7 @@ func (m *ProvisionalModel) Reset() { *m = ProvisionalModel{} } func (m *ProvisionalModel) String() string { return proto.CompactTextString(m) } func (*ProvisionalModel) ProtoMessage() {} func (*ProvisionalModel) Descriptor() ([]byte, []int) { - return fileDescriptor_228e3edf2a47c81f, []int{0} + return fileDescriptor_7ee36b89c15e0be3, []int{0} } func (m *ProvisionalModel) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -103,29 +103,29 @@ func init() { } func init() { - proto.RegisterFile("compliance/provisional_model.proto", fileDescriptor_228e3edf2a47c81f) + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/compliance/provisional_model.proto", fileDescriptor_7ee36b89c15e0be3) } -var fileDescriptor_228e3edf2a47c81f = []byte{ - // 273 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x64, 0x90, 0xb1, 0x4a, 0xc4, 0x30, - 0x1c, 0xc6, 0x2f, 0x9e, 0x15, 0x0d, 0x88, 0x67, 0x71, 0xe8, 0x14, 0xca, 0x4d, 0x1d, 0x6c, 0x3b, - 0x88, 0x2f, 0xe0, 0x2a, 0x82, 0x14, 0x71, 0x70, 0x91, 0xb4, 0xf9, 0x5f, 0xfd, 0x43, 0xae, 0x09, - 0x69, 0x5a, 0x3d, 0x9f, 0xc2, 0x87, 0xf0, 0x61, 0x1c, 0x6f, 0x74, 0x94, 0xf6, 0x45, 0x24, 0x06, - 0xe9, 0xa1, 0x5b, 0xf2, 0x4b, 
0xbe, 0x0f, 0xbe, 0x1f, 0x5d, 0x56, 0x6a, 0xad, 0x25, 0xf2, 0xa6, - 0x82, 0x5c, 0x1b, 0xd5, 0x63, 0x8b, 0xaa, 0xe1, 0xf2, 0x71, 0xad, 0x04, 0xc8, 0x4c, 0x1b, 0x65, - 0x55, 0x78, 0xf9, 0x8a, 0x75, 0x09, 0xc0, 0xa5, 0xff, 0x97, 0x09, 0x6c, 0xad, 0xc1, 0xb2, 0xb3, - 0x20, 0xa6, 0xb4, 0x04, 0x51, 0x83, 0xc9, 0x26, 0xb0, 0x7c, 0x27, 0x74, 0x71, 0x3b, 0x55, 0xde, - 0xb8, 0xc6, 0x70, 0x41, 0xe7, 0x3d, 0x8a, 0x88, 0xc4, 0x24, 0x09, 0x0a, 0x77, 0x74, 0x44, 0xa3, - 0x88, 0xf6, 0x3c, 0xd1, 0x28, 0xc2, 0x84, 0x9e, 0xb4, 0x6a, 0x65, 0x9f, 0xb9, 0x81, 0x7b, 0x30, - 0x2e, 0x1d, 0xcd, 0x63, 0x92, 0x1c, 0x17, 0x7f, 0x71, 0x78, 0x4e, 0x4f, 0x2b, 0x30, 0x16, 0x57, - 0x58, 0x71, 0x8b, 0xaa, 0xb9, 0xdb, 0x68, 0x88, 0xf6, 0x63, 0x92, 0x1c, 0x15, 0xff, 0x1f, 0xc2, - 0x33, 0x1a, 0xf4, 0x5c, 0x76, 0x10, 0x05, 0x31, 0x49, 0x0e, 0x0b, 0x7f, 0xb9, 0x82, 0x8f, 0x81, - 0x91, 0xed, 0xc0, 0xc8, 0xd7, 0xc0, 0xc8, 0xdb, 0xc8, 0x66, 0xdb, 0x91, 0xcd, 0x3e, 0x47, 0x36, - 0x7b, 0xb8, 0xae, 0xd1, 0x3e, 0x75, 0xa5, 0xdb, 0x94, 0x7b, 0x05, 0xe9, 0xaf, 0x83, 0x7c, 0xc7, - 0x41, 0x3a, 0x6d, 0x4e, 0xbd, 0x85, 0xfc, 0x25, 0xdf, 0xd1, 0x6a, 0x37, 0x1a, 0xda, 0xf2, 0xe0, - 0xc7, 0xe5, 0xc5, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0xa0, 0xb1, 0xa1, 0xd3, 0x71, 0x01, 0x00, - 0x00, +var fileDescriptor_7ee36b89c15e0be3 = []byte{ + // 274 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xbf, 0x4a, 0xc5, 0x30, + 0x18, 0x47, 0x1b, 0xaf, 0x15, 0x0d, 0x88, 0xd7, 0xe2, 0xd0, 0x29, 0x14, 0xa7, 0x0e, 0xb6, 0x1d, + 0xc4, 0x17, 0x70, 0x95, 0x0b, 0x52, 0xc4, 0xc1, 0x45, 0xd2, 0xe6, 0xbb, 0xf5, 0x83, 0xdc, 0x26, + 0xa4, 0x69, 0xf5, 0xfa, 0x14, 0x3e, 0x84, 0x0f, 0xe3, 0x78, 0x47, 0x47, 0x69, 0x5f, 0x44, 0x62, + 0x85, 0xfa, 0x0f, 0xb7, 0xe4, 0x84, 0x1c, 0xf8, 0x1d, 0xba, 0x78, 0xc4, 0xaa, 0x00, 0xe0, 0x52, + 0x22, 0xaf, 0x4b, 0xc8, 0x04, 0x36, 0xd6, 0x60, 0xd1, 0x5a, 0x10, 0xa5, 0x5a, 0xe9, 0x91, 0x4a, + 0x10, 0x15, 0x98, 0x6c, 0x02, 0x99, 0x36, 0xaa, 0xc3, 0x06, 
0x55, 0xcd, 0xe5, 0xed, 0x4a, 0x09, + 0x90, 0xa9, 0x36, 0xca, 0xaa, 0xe0, 0xec, 0xbb, 0x2e, 0xfd, 0x47, 0x97, 0x4e, 0xe0, 0xf8, 0x99, + 0xd0, 0xf9, 0xe5, 0xa4, 0x5c, 0x38, 0x63, 0x30, 0xa7, 0xb3, 0x0e, 0x45, 0x48, 0x22, 0x12, 0xfb, + 0xb9, 0x3b, 0x3a, 0xa2, 0x51, 0x84, 0x5b, 0x23, 0xd1, 0x28, 0x82, 0x98, 0x1e, 0x34, 0x6a, 0x69, + 0xef, 0xb9, 0x81, 0x6b, 0x30, 0xee, 0x77, 0x38, 0x8b, 0x48, 0xbc, 0x9f, 0xff, 0xc4, 0xc1, 0x09, + 0x3d, 0x2c, 0xc1, 0x58, 0x5c, 0x62, 0xc9, 0x2d, 0xaa, 0xfa, 0x6a, 0xad, 0x21, 0xdc, 0x8e, 0x48, + 0xbc, 0x97, 0xff, 0x7e, 0x08, 0x8e, 0xa8, 0xdf, 0x71, 0xd9, 0x42, 0xe8, 0x47, 0x24, 0xde, 0xcd, + 0xc7, 0xcb, 0x39, 0xbc, 0xf4, 0x8c, 0x6c, 0x7a, 0x46, 0xde, 0x7a, 0x46, 0x9e, 0x06, 0xe6, 0x6d, + 0x06, 0xe6, 0xbd, 0x0e, 0xcc, 0xbb, 0xb9, 0xa8, 0xd0, 0xde, 0xb5, 0x85, 0xdb, 0x94, 0x8d, 0x09, + 0x92, 0xbf, 0x92, 0x26, 0xd3, 0xe6, 0xe4, 0x33, 0xea, 0xc3, 0xd7, 0xac, 0x76, 0xad, 0xa1, 0x29, + 0x76, 0x3e, 0x5a, 0x9e, 0xbe, 0x07, 0x00, 0x00, 0xff, 0xff, 0x91, 0xd3, 0x94, 0x27, 0x9c, 0x01, + 0x00, 0x00, } func (m *ProvisionalModel) Marshal() (dAtA []byte, err error) { diff --git a/x/compliance/types/query.pb.go b/x/compliance/types/query.pb.go index aaf2e81c3..9e57502de 100644 --- a/x/compliance/types/query.pb.go +++ b/x/compliance/types/query.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: compliance/query.proto +// source: zigbeealliance/distributedcomplianceledger/compliance/query.proto package types @@ -7,9 +7,9 @@ import ( context "context" fmt "fmt" query "github.com/cosmos/cosmos-sdk/types/query" - _ "github.com/gogo/protobuf/gogoproto" - grpc1 "github.com/gogo/protobuf/grpc" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + grpc1 "github.com/cosmos/gogoproto/grpc" + proto "github.com/cosmos/gogoproto/proto" _ "google.golang.org/genproto/googleapis/api/annotations" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" @@ -17,8 +17,6 @@ import ( io "io" math "math" math_bits "math/bits" - - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" ) // Reference imports to suppress errors if they are not otherwise used. @@ -43,7 +41,7 @@ func (m *QueryGetComplianceInfoRequest) Reset() { *m = QueryGetComplianc func (m *QueryGetComplianceInfoRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetComplianceInfoRequest) ProtoMessage() {} func (*QueryGetComplianceInfoRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_06039bbcb314fcaf, []int{0} + return fileDescriptor_485cf092304eb4f7, []int{0} } func (m *QueryGetComplianceInfoRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -101,14 +99,14 @@ func (m *QueryGetComplianceInfoRequest) GetCertificationType() string { } type QueryGetComplianceInfoResponse struct { - ComplianceInfo dclcompltypes.ComplianceInfo `protobuf:"bytes,1,opt,name=complianceInfo,proto3" json:"complianceInfo"` + ComplianceInfo ComplianceInfo `protobuf:"bytes,1,opt,name=complianceInfo,proto3" json:"complianceInfo"` } func (m *QueryGetComplianceInfoResponse) Reset() { *m = QueryGetComplianceInfoResponse{} } func (m *QueryGetComplianceInfoResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetComplianceInfoResponse) ProtoMessage() {} func 
(*QueryGetComplianceInfoResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_06039bbcb314fcaf, []int{1} + return fileDescriptor_485cf092304eb4f7, []int{1} } func (m *QueryGetComplianceInfoResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -137,11 +135,11 @@ func (m *QueryGetComplianceInfoResponse) XXX_DiscardUnknown() { var xxx_messageInfo_QueryGetComplianceInfoResponse proto.InternalMessageInfo -func (m *QueryGetComplianceInfoResponse) GetComplianceInfo() dclcompltypes.ComplianceInfo { +func (m *QueryGetComplianceInfoResponse) GetComplianceInfo() ComplianceInfo { if m != nil { return m.ComplianceInfo } - return dclcompltypes.ComplianceInfo{} + return ComplianceInfo{} } type QueryAllComplianceInfoRequest struct { @@ -152,7 +150,7 @@ func (m *QueryAllComplianceInfoRequest) Reset() { *m = QueryAllComplianc func (m *QueryAllComplianceInfoRequest) String() string { return proto.CompactTextString(m) } func (*QueryAllComplianceInfoRequest) ProtoMessage() {} func (*QueryAllComplianceInfoRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_06039bbcb314fcaf, []int{2} + return fileDescriptor_485cf092304eb4f7, []int{2} } func (m *QueryAllComplianceInfoRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -189,7 +187,7 @@ func (m *QueryAllComplianceInfoRequest) GetPagination() *query.PageRequest { } type QueryAllComplianceInfoResponse struct { - ComplianceInfo []dclcompltypes.ComplianceInfo `protobuf:"bytes,1,rep,name=complianceInfo,proto3" json:"complianceInfo"` + ComplianceInfo []ComplianceInfo `protobuf:"bytes,1,rep,name=complianceInfo,proto3" json:"complianceInfo"` Pagination *query.PageResponse `protobuf:"bytes,2,opt,name=pagination,proto3" json:"pagination,omitempty"` } @@ -197,7 +195,7 @@ func (m *QueryAllComplianceInfoResponse) Reset() { *m = QueryAllComplian func (m *QueryAllComplianceInfoResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllComplianceInfoResponse) ProtoMessage() {} func 
(*QueryAllComplianceInfoResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_06039bbcb314fcaf, []int{3} + return fileDescriptor_485cf092304eb4f7, []int{3} } func (m *QueryAllComplianceInfoResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -226,7 +224,7 @@ func (m *QueryAllComplianceInfoResponse) XXX_DiscardUnknown() { var xxx_messageInfo_QueryAllComplianceInfoResponse proto.InternalMessageInfo -func (m *QueryAllComplianceInfoResponse) GetComplianceInfo() []dclcompltypes.ComplianceInfo { +func (m *QueryAllComplianceInfoResponse) GetComplianceInfo() []ComplianceInfo { if m != nil { return m.ComplianceInfo } @@ -251,7 +249,7 @@ func (m *QueryGetCertifiedModelRequest) Reset() { *m = QueryGetCertified func (m *QueryGetCertifiedModelRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetCertifiedModelRequest) ProtoMessage() {} func (*QueryGetCertifiedModelRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_06039bbcb314fcaf, []int{4} + return fileDescriptor_485cf092304eb4f7, []int{4} } func (m *QueryGetCertifiedModelRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -316,7 +314,7 @@ func (m *QueryGetCertifiedModelResponse) Reset() { *m = QueryGetCertifie func (m *QueryGetCertifiedModelResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetCertifiedModelResponse) ProtoMessage() {} func (*QueryGetCertifiedModelResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_06039bbcb314fcaf, []int{5} + return fileDescriptor_485cf092304eb4f7, []int{5} } func (m *QueryGetCertifiedModelResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -360,7 +358,7 @@ func (m *QueryAllCertifiedModelRequest) Reset() { *m = QueryAllCertified func (m *QueryAllCertifiedModelRequest) String() string { return proto.CompactTextString(m) } func (*QueryAllCertifiedModelRequest) ProtoMessage() {} func (*QueryAllCertifiedModelRequest) Descriptor() ([]byte, []int) { - return 
fileDescriptor_06039bbcb314fcaf, []int{6} + return fileDescriptor_485cf092304eb4f7, []int{6} } func (m *QueryAllCertifiedModelRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -405,7 +403,7 @@ func (m *QueryAllCertifiedModelResponse) Reset() { *m = QueryAllCertifie func (m *QueryAllCertifiedModelResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllCertifiedModelResponse) ProtoMessage() {} func (*QueryAllCertifiedModelResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_06039bbcb314fcaf, []int{7} + return fileDescriptor_485cf092304eb4f7, []int{7} } func (m *QueryAllCertifiedModelResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -459,7 +457,7 @@ func (m *QueryGetRevokedModelRequest) Reset() { *m = QueryGetRevokedMode func (m *QueryGetRevokedModelRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetRevokedModelRequest) ProtoMessage() {} func (*QueryGetRevokedModelRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_06039bbcb314fcaf, []int{8} + return fileDescriptor_485cf092304eb4f7, []int{8} } func (m *QueryGetRevokedModelRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -524,7 +522,7 @@ func (m *QueryGetRevokedModelResponse) Reset() { *m = QueryGetRevokedMod func (m *QueryGetRevokedModelResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetRevokedModelResponse) ProtoMessage() {} func (*QueryGetRevokedModelResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_06039bbcb314fcaf, []int{9} + return fileDescriptor_485cf092304eb4f7, []int{9} } func (m *QueryGetRevokedModelResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -568,7 +566,7 @@ func (m *QueryAllRevokedModelRequest) Reset() { *m = QueryAllRevokedMode func (m *QueryAllRevokedModelRequest) String() string { return proto.CompactTextString(m) } func (*QueryAllRevokedModelRequest) ProtoMessage() {} func (*QueryAllRevokedModelRequest) Descriptor() 
([]byte, []int) { - return fileDescriptor_06039bbcb314fcaf, []int{10} + return fileDescriptor_485cf092304eb4f7, []int{10} } func (m *QueryAllRevokedModelRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -613,7 +611,7 @@ func (m *QueryAllRevokedModelResponse) Reset() { *m = QueryAllRevokedMod func (m *QueryAllRevokedModelResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllRevokedModelResponse) ProtoMessage() {} func (*QueryAllRevokedModelResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_06039bbcb314fcaf, []int{11} + return fileDescriptor_485cf092304eb4f7, []int{11} } func (m *QueryAllRevokedModelResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -667,7 +665,7 @@ func (m *QueryGetProvisionalModelRequest) Reset() { *m = QueryGetProvisi func (m *QueryGetProvisionalModelRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetProvisionalModelRequest) ProtoMessage() {} func (*QueryGetProvisionalModelRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_06039bbcb314fcaf, []int{12} + return fileDescriptor_485cf092304eb4f7, []int{12} } func (m *QueryGetProvisionalModelRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -732,7 +730,7 @@ func (m *QueryGetProvisionalModelResponse) Reset() { *m = QueryGetProvis func (m *QueryGetProvisionalModelResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetProvisionalModelResponse) ProtoMessage() {} func (*QueryGetProvisionalModelResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_06039bbcb314fcaf, []int{13} + return fileDescriptor_485cf092304eb4f7, []int{13} } func (m *QueryGetProvisionalModelResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -776,7 +774,7 @@ func (m *QueryAllProvisionalModelRequest) Reset() { *m = QueryAllProvisi func (m *QueryAllProvisionalModelRequest) String() string { return proto.CompactTextString(m) } func (*QueryAllProvisionalModelRequest) 
ProtoMessage() {} func (*QueryAllProvisionalModelRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_06039bbcb314fcaf, []int{14} + return fileDescriptor_485cf092304eb4f7, []int{14} } func (m *QueryAllProvisionalModelRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -821,7 +819,7 @@ func (m *QueryAllProvisionalModelResponse) Reset() { *m = QueryAllProvis func (m *QueryAllProvisionalModelResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllProvisionalModelResponse) ProtoMessage() {} func (*QueryAllProvisionalModelResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_06039bbcb314fcaf, []int{15} + return fileDescriptor_485cf092304eb4f7, []int{15} } func (m *QueryAllProvisionalModelResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -874,7 +872,7 @@ func (m *QueryGetDeviceSoftwareComplianceRequest) Reset() { func (m *QueryGetDeviceSoftwareComplianceRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetDeviceSoftwareComplianceRequest) ProtoMessage() {} func (*QueryGetDeviceSoftwareComplianceRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_06039bbcb314fcaf, []int{16} + return fileDescriptor_485cf092304eb4f7, []int{16} } func (m *QueryGetDeviceSoftwareComplianceRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -920,7 +918,7 @@ func (m *QueryGetDeviceSoftwareComplianceResponse) Reset() { func (m *QueryGetDeviceSoftwareComplianceResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetDeviceSoftwareComplianceResponse) ProtoMessage() {} func (*QueryGetDeviceSoftwareComplianceResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_06039bbcb314fcaf, []int{17} + return fileDescriptor_485cf092304eb4f7, []int{17} } func (m *QueryGetDeviceSoftwareComplianceResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -966,7 +964,7 @@ func (m *QueryAllDeviceSoftwareComplianceRequest) Reset() { func (m 
*QueryAllDeviceSoftwareComplianceRequest) String() string { return proto.CompactTextString(m) } func (*QueryAllDeviceSoftwareComplianceRequest) ProtoMessage() {} func (*QueryAllDeviceSoftwareComplianceRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_06039bbcb314fcaf, []int{18} + return fileDescriptor_485cf092304eb4f7, []int{18} } func (m *QueryAllDeviceSoftwareComplianceRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1013,7 +1011,7 @@ func (m *QueryAllDeviceSoftwareComplianceResponse) Reset() { func (m *QueryAllDeviceSoftwareComplianceResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllDeviceSoftwareComplianceResponse) ProtoMessage() {} func (*QueryAllDeviceSoftwareComplianceResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_06039bbcb314fcaf, []int{19} + return fileDescriptor_485cf092304eb4f7, []int{19} } func (m *QueryAllDeviceSoftwareComplianceResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1079,79 +1077,82 @@ func init() { proto.RegisterType((*QueryAllDeviceSoftwareComplianceResponse)(nil), "zigbeealliance.distributedcomplianceledger.compliance.QueryAllDeviceSoftwareComplianceResponse") } -func init() { proto.RegisterFile("compliance/query.proto", fileDescriptor_06039bbcb314fcaf) } - -var fileDescriptor_06039bbcb314fcaf = []byte{ - // 1098 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x98, 0x41, 0x6f, 0xdc, 0x44, - 0x14, 0xc7, 0x33, 0xbb, 0x2d, 0x52, 0x87, 0xd2, 0xa4, 0x43, 0x85, 0x56, 0xdb, 0xe2, 0x2c, 0x23, - 0xd4, 0xac, 0x42, 0xd7, 0x56, 0x83, 0xb8, 0x21, 0xa4, 0x6d, 0x02, 0x51, 0x85, 0x10, 0xa9, 0x13, - 0x02, 0x54, 0xa8, 0x2b, 0xef, 0x7a, 0x62, 0x46, 0x4c, 0x3c, 0x8e, 0xed, 0x2c, 0x84, 0x28, 0x17, - 0x2e, 0x9c, 0x90, 0x28, 0x08, 0x89, 0x0a, 0xf5, 0xc0, 0x17, 0xe1, 0xdc, 0x63, 0x25, 0x24, 0xc4, - 0x29, 0x42, 0x1b, 0x84, 0x04, 0x47, 0x3e, 0x01, 0xf2, 0x78, 0x9c, 0xb5, 0xbd, 0xf6, 0x6e, 0xb4, - 0xf6, 0xd2, 
0x5c, 0x22, 0x6b, 0xe6, 0x79, 0xde, 0x7b, 0xbf, 0xf7, 0x8f, 0xf3, 0x9f, 0xc0, 0x97, - 0x7a, 0x7c, 0xd7, 0x61, 0xd4, 0xb0, 0x7b, 0x44, 0xdb, 0xdb, 0x27, 0xee, 0x81, 0xea, 0xb8, 0xdc, - 0xe7, 0xe8, 0x8d, 0x2f, 0xa9, 0xd5, 0x25, 0xc4, 0x60, 0xe1, 0x9e, 0x6a, 0x52, 0xcf, 0x77, 0x69, - 0x77, 0xdf, 0x27, 0xe6, 0xf0, 0x0d, 0x46, 0x4c, 0x8b, 0xb8, 0xea, 0x70, 0xa1, 0x7e, 0xc3, 0xe2, - 0xdc, 0x62, 0x44, 0x33, 0x1c, 0xaa, 0x19, 0xb6, 0xcd, 0x7d, 0xc3, 0xa7, 0xdc, 0xf6, 0xc2, 0x43, - 0xeb, 0xcb, 0x3d, 0xee, 0xed, 0x72, 0x4f, 0xeb, 0x1a, 0x9e, 0xcc, 0xa6, 0xf5, 0x6f, 0x77, 0x89, - 0x6f, 0xdc, 0xd6, 0x1c, 0xc3, 0xa2, 0xb6, 0x08, 0x96, 0xb1, 0x8d, 0x58, 0x61, 0xc3, 0xc7, 0x0e, - 0xb5, 0x77, 0x78, 0x56, 0x04, 0x71, 0x7d, 0xba, 0x43, 0x89, 0xd9, 0xd9, 0xe5, 0x26, 0x61, 0x32, - 0x42, 0x89, 0x45, 0xb8, 0xa4, 0xcf, 0x3f, 0x4b, 0xed, 0xe3, 0xd8, 0xbe, 0xe3, 0xf2, 0x3e, 0xf5, - 0x28, 0xb7, 0x0d, 0x96, 0x88, 0x79, 0x2d, 0x16, 0x63, 0x92, 0x3e, 0xed, 0x91, 0x8e, 0xc7, 0x77, - 0xfc, 0xcf, 0x0d, 0x97, 0x74, 0x86, 0x5b, 0x32, 0xf8, 0x9a, 0xc5, 0x2d, 0x2e, 0x1e, 0xb5, 0xe0, - 0x29, 0x5c, 0xc5, 0x3f, 0x01, 0xf8, 0xf2, 0xbd, 0xa0, 0xdb, 0x75, 0xe2, 0xaf, 0x9e, 0xbe, 0x72, - 0xd7, 0xde, 0xe1, 0x3a, 0xd9, 0xdb, 0x27, 0x9e, 0x8f, 0x16, 0x60, 0xb5, 0x4f, 0xcd, 0x1a, 0x68, - 0x80, 0xe6, 0x45, 0x3d, 0x78, 0x0c, 0x56, 0x1c, 0x6a, 0xd6, 0x2a, 0xe1, 0x8a, 0x43, 0x4d, 0xd4, - 0x84, 0xf3, 0x51, 0xe2, 0x6d, 0xe2, 0x06, 0x95, 0xd6, 0xaa, 0x0d, 0xd0, 0x7c, 0x41, 0x4f, 0x2f, - 0xa3, 0x5b, 0xf0, 0xaa, 0xe4, 0xd1, 0x13, 0x44, 0xb7, 0x0e, 0x1c, 0x52, 0xbb, 0xd0, 0x00, 0xcd, - 0x4b, 0xfa, 0xe8, 0x06, 0xfe, 0x01, 0x40, 0x25, 0xaf, 0x3a, 0xcf, 0xe1, 0xb6, 0x47, 0x90, 0x07, - 0xaf, 0xf4, 0x12, 0x3b, 0xa2, 0xd2, 0xe7, 0x57, 0xde, 0x56, 0xa7, 0x52, 0x89, 0x9a, 0x4c, 0x73, - 0xe7, 0xc2, 0x93, 0xe3, 0xc5, 0x39, 0x3d, 0x95, 0x02, 0x5b, 0x12, 0x5a, 0x9b, 0xb1, 0x6c, 0x68, - 0xef, 0x40, 0x38, 0x54, 0x8d, 0xac, 0xe8, 0xa6, 0x1a, 0x4a, 0x4c, 0x0d, 0x24, 0xa6, 0x86, 0x82, - 0x96, 0x12, 0x53, 0x37, 0x0c, 0x8b, 0xc8, 0x77, 
0xf5, 0xd8, 0x9b, 0xf8, 0x38, 0x02, 0x90, 0x91, - 0x69, 0x0c, 0x80, 0xea, 0x8c, 0x01, 0xa0, 0xf5, 0x44, 0x7f, 0x15, 0xd1, 0xdf, 0xd2, 0xc4, 0xfe, - 0xc2, 0x8a, 0x13, 0x0d, 0x26, 0xf4, 0x17, 0xfd, 0xa2, 0xbc, 0x17, 0x68, 0xfc, 0xdc, 0xe9, 0x2f, - 0x55, 0x5d, 0x0c, 0x7f, 0x62, 0xa7, 0xa8, 0xfe, 0x12, 0x87, 0x9d, 0xe2, 0x4f, 0xac, 0x26, 0xf4, - 0x97, 0x09, 0x6d, 0x26, 0xfa, 0x3b, 0x3b, 0x80, 0xea, 0x8c, 0x01, 0x94, 0xa7, 0xbf, 0x47, 0x00, - 0x5e, 0x8f, 0x26, 0xac, 0x87, 0x9f, 0xe1, 0x73, 0xa3, 0xbe, 0x6f, 0x00, 0xbc, 0x91, 0x5d, 0x9b, - 0x44, 0xbf, 0x0b, 0x2f, 0xbb, 0xb1, 0x75, 0x39, 0xe7, 0xd5, 0x29, 0xc1, 0xc7, 0x53, 0x48, 0xec, - 0x89, 0xe3, 0x31, 0x91, 0xa8, 0xda, 0x8c, 0x65, 0xa1, 0x2a, 0x4b, 0x73, 0xbf, 0x45, 0x6d, 0x8f, - 0xe4, 0xc9, 0x6d, 0xbb, 0x3a, 0xc3, 0xb6, 0xcb, 0xd3, 0xda, 0x63, 0x00, 0x17, 0xa3, 0x79, 0x6e, - 0x0c, 0xff, 0xa4, 0x9f, 0x1b, 0xbd, 0x3d, 0x06, 0xb0, 0x91, 0x5f, 0x9f, 0x84, 0x7f, 0x00, 0x17, - 0x9c, 0xd4, 0x9e, 0x9c, 0xf5, 0xfa, 0x94, 0x03, 0x48, 0xa7, 0x92, 0x43, 0x18, 0x49, 0x83, 0xa9, - 0xc4, 0xd7, 0x66, 0x2c, 0x0f, 0x5f, 0x59, 0x1a, 0xfc, 0x2b, 0x42, 0x91, 0x99, 0x6b, 0x2c, 0x8a, - 0xea, 0xff, 0x80, 0xa2, 0x3c, 0x4d, 0x6e, 0xc2, 0xa5, 0x68, 0xe4, 0x6b, 0xc2, 0x41, 0x6e, 0x4a, - 0x09, 0x0d, 0x6d, 0x40, 0xc4, 0xb6, 0x09, 0xe7, 0x7b, 0x6b, 0xab, 0xa7, 0xaa, 0x21, 0x77, 0x43, - 0x99, 0x5e, 0xd2, 0xd3, 0xcb, 0xf8, 0x17, 0x00, 0x9b, 0x93, 0x4f, 0x95, 0x14, 0x1f, 0x02, 0x58, - 0x33, 0x73, 0x82, 0xe4, 0x04, 0xdf, 0x9f, 0x12, 0x67, 0x5e, 0x6e, 0x89, 0x35, 0x37, 0x2d, 0xde, - 0x93, 0x54, 0xda, 0x8c, 0x4d, 0xa2, 0x52, 0x96, 0xe2, 0xbe, 0xae, 0x48, 0x66, 0x63, 0x73, 0x9e, - 0x8d, 0x59, 0xf5, 0x19, 0x30, 0x2b, 0x4d, 0x92, 0x2b, 0x83, 0x6b, 0xf0, 0xa2, 0x20, 0x81, 0x7e, - 0xac, 0xc0, 0x2b, 0x49, 0x3b, 0x8a, 0xb6, 0xa6, 0x6c, 0x6b, 0xec, 0x1d, 0xa7, 0xfe, 0x41, 0xc9, - 0xa7, 0x86, 0x5d, 0xe1, 0x8f, 0xbf, 0xfa, 0xf5, 0xcf, 0xef, 0x2b, 0x9b, 0xe8, 0x9e, 0x66, 0xf6, - 0x98, 0x96, 0x79, 0x69, 0x6c, 0x05, 0x97, 0x46, 0xed, 0xb0, 0x4f, 0xcd, 0x23, 0xed, 
0xd0, 0x11, - 0x3f, 0x53, 0x1f, 0xf1, 0x23, 0xed, 0x70, 0xe4, 0x53, 0x7d, 0x84, 0xfe, 0x06, 0xf0, 0x6a, 0x32, - 0x6b, 0x9b, 0xb1, 0x62, 0x74, 0xf2, 0x2e, 0x33, 0xc5, 0xe8, 0xe4, 0x5e, 0x5c, 0xf0, 0x92, 0xa0, - 0xf3, 0x0a, 0x5a, 0x9c, 0x40, 0x07, 0x3d, 0x0a, 0x64, 0x90, 0xf4, 0x7f, 0x85, 0x65, 0x90, 0xe5, - 0x9a, 0x8b, 0xcb, 0x20, 0xd3, 0x21, 0xe3, 0xfb, 0xa2, 0xd1, 0x2d, 0xa4, 0x8f, 0x34, 0x1a, 0xc5, - 0xb7, 0xc4, 0xad, 0xde, 0x9b, 0x42, 0x07, 0xff, 0x04, 0x3a, 0x48, 0xa4, 0x2d, 0x45, 0x07, 0xe5, - 0xe3, 0xc9, 0xbd, 0x40, 0xe0, 0xa6, 0xc0, 0x83, 0x51, 0x63, 0x12, 0x1e, 0xf4, 0xb0, 0x02, 0x2f, - 0xc7, 0xed, 0x1a, 0xd2, 0x0b, 0x0e, 0x2c, 0xc3, 0xc6, 0xd6, 0x37, 0x4b, 0x3d, 0x53, 0xf6, 0xf8, - 0x91, 0xe8, 0x51, 0x47, 0x1b, 0xe9, 0x1e, 0xa5, 0xd3, 0x9c, 0x5e, 0x00, 0x03, 0x00, 0xe7, 0xe3, - 0x29, 0x83, 0xf1, 0xeb, 0x05, 0x07, 0x55, 0x3a, 0x96, 0x1c, 0x27, 0x8f, 0x6f, 0x0a, 0x2c, 0x0d, - 0xa4, 0x8c, 0xc7, 0x82, 0x7e, 0xae, 0xc0, 0x85, 0xb4, 0x37, 0x42, 0xdb, 0x05, 0x07, 0x95, 0xe3, - 0x21, 0xeb, 0x1f, 0x96, 0x7e, 0xae, 0xec, 0xf6, 0x13, 0xd1, 0xed, 0x36, 0xda, 0x4a, 0x77, 0x1b, - 0xb3, 0x77, 0xd3, 0x0b, 0xe1, 0x5f, 0x00, 0x5f, 0x4c, 0xa7, 0x0e, 0xc4, 0xb0, 0x5d, 0x70, 0x70, - 0x33, 0xc1, 0x34, 0xc6, 0x56, 0xe3, 0x65, 0x81, 0xe9, 0x55, 0x84, 0x27, 0x63, 0x0a, 0x84, 0x51, - 0xcb, 0x73, 0x2c, 0xe8, 0x41, 0xc1, 0x41, 0x4e, 0xb0, 0x7e, 0xf5, 0xce, 0xcc, 0xce, 0x97, 0x24, - 0xd6, 0x04, 0x89, 0xb7, 0xd0, 0x9b, 0x69, 0x12, 0xa1, 0x09, 0x6b, 0x45, 0xba, 0x68, 0xc5, 0xb6, - 0x0e, 0x53, 0x66, 0xfc, 0x08, 0x7d, 0x57, 0x81, 0xd7, 0xf3, 0x52, 0x05, 0x02, 0x79, 0x50, 0x70, - 0x90, 0x33, 0xc5, 0x74, 0x06, 0x37, 0x8c, 0x57, 0x04, 0xa6, 0x5b, 0x68, 0xf9, 0xec, 0x98, 0xee, - 0x90, 0x27, 0x03, 0x05, 0x3c, 0x1d, 0x28, 0xe0, 0x8f, 0x81, 0x02, 0xbe, 0x3d, 0x51, 0xe6, 0x9e, - 0x9e, 0x28, 0x73, 0xbf, 0x9f, 0x28, 0x73, 0xf7, 0xdf, 0xb5, 0xa8, 0xff, 0xe9, 0x7e, 0x37, 0xa8, - 0x44, 0x0b, 0x0b, 0x6f, 0x45, 0x95, 0x6b, 0xb1, 0xca, 0x63, 0x87, 0xb5, 0xc2, 0xda, 0xb5, 0x2f, - 0xe2, 0xb9, 0xfd, 0x03, 
0x87, 0x78, 0xdd, 0xe7, 0xc4, 0x7f, 0xd9, 0x5f, 0xff, 0x2f, 0x00, 0x00, - 0xff, 0xff, 0x61, 0x71, 0x5b, 0x6a, 0xcb, 0x18, 0x00, 0x00, +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/compliance/query.proto", fileDescriptor_485cf092304eb4f7) +} + +var fileDescriptor_485cf092304eb4f7 = []byte{ + // 1107 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x98, 0xd1, 0x6f, 0xdb, 0x44, + 0x1c, 0xc7, 0x7b, 0xc9, 0x86, 0xb4, 0x63, 0xac, 0xdd, 0xb1, 0x87, 0x28, 0x1b, 0x69, 0x38, 0xa1, + 0x35, 0xaa, 0x16, 0x5b, 0x2b, 0xe2, 0x0d, 0x21, 0x65, 0x2d, 0x54, 0xd5, 0x34, 0xd1, 0xb9, 0xa5, + 0xc0, 0x84, 0x16, 0x39, 0xf1, 0xd5, 0x9c, 0xb8, 0xfa, 0x5c, 0xdb, 0x0d, 0x94, 0xaa, 0x2f, 0xbc, + 0xf0, 0x84, 0xc4, 0x40, 0x48, 0x4c, 0x68, 0x0f, 0xfc, 0x23, 0x3c, 0xef, 0x71, 0x12, 0x12, 0xe2, + 0x69, 0x42, 0x29, 0x42, 0x82, 0x47, 0xfe, 0x02, 0xe4, 0xf3, 0xb9, 0xb1, 0x1d, 0x3b, 0xa9, 0x6c, + 0x87, 0xf5, 0x25, 0xb2, 0xee, 0xec, 0xdf, 0xef, 0x7e, 0x9f, 0xdf, 0xf7, 0x9c, 0xef, 0x19, 0x76, + 0xbe, 0xa4, 0x66, 0x8f, 0x10, 0x9d, 0x31, 0xaa, 0x5b, 0x7d, 0xa2, 0x1a, 0xd4, 0xf5, 0x1c, 0xda, + 0x3b, 0xf0, 0x88, 0xd1, 0xe7, 0x7b, 0x76, 0x30, 0xca, 0x88, 0x61, 0x12, 0x47, 0x1d, 0x0d, 0xa8, + 0xfb, 0x07, 0xc4, 0x39, 0x54, 0x6c, 0x87, 0x7b, 0x1c, 0xbd, 0x15, 0x0f, 0xa1, 0x4c, 0x08, 0xa1, + 0x8c, 0x06, 0xea, 0x37, 0x4c, 0xce, 0x4d, 0x46, 0x54, 0xdd, 0xa6, 0xaa, 0x6e, 0x59, 0xdc, 0xd3, + 0x3d, 0xca, 0x2d, 0x37, 0x08, 0x5a, 0x5f, 0xee, 0x73, 0x77, 0x8f, 0xbb, 0x6a, 0x4f, 0x77, 0x65, + 0x36, 0x75, 0x70, 0xbb, 0x47, 0x3c, 0xfd, 0xb6, 0x6a, 0xeb, 0x26, 0xb5, 0xc4, 0xcd, 0xf2, 0xde, + 0xbb, 0xf9, 0x6a, 0x18, 0x5d, 0x76, 0xa9, 0xb5, 0xcb, 0x0b, 0x06, 0x23, 0x8e, 0x47, 0x77, 0x29, + 0x31, 0xba, 0x7b, 0xdc, 0x20, 0x4c, 0x06, 0xdb, 0xc8, 0x17, 0xcc, 0x21, 0x03, 0xfe, 0x59, 0x22, + 0xd4, 0xbd, 0x7c, 0xa1, 0x6c, 0x87, 0x0f, 0xa8, 0x4b, 0xb9, 0xa5, 0xb3, 0x58, 0xb8, 0x9d, 0x7c, + 0xe1, 0x0c, 0x32, 0xa0, 0x7d, 0xd2, 0x75, 0xf9, 0xae, 
0xf7, 0xb9, 0xee, 0x90, 0xee, 0x68, 0x4a, + 0xc6, 0xbd, 0x66, 0x72, 0x93, 0x8b, 0x4b, 0xd5, 0xbf, 0x0a, 0x46, 0xf1, 0x4f, 0x00, 0xbe, 0x76, + 0xdf, 0x6f, 0xe2, 0x3a, 0xf1, 0x56, 0x4f, 0x1f, 0xd9, 0xb0, 0x76, 0xb9, 0x46, 0xf6, 0x0f, 0x88, + 0xeb, 0xa1, 0x05, 0x58, 0x1d, 0x50, 0xa3, 0x06, 0x9a, 0xa0, 0x75, 0x51, 0xf3, 0x2f, 0xfd, 0x11, + 0x9b, 0x1a, 0xb5, 0x4a, 0x30, 0x62, 0x53, 0x03, 0xb5, 0xe0, 0x7c, 0x98, 0x78, 0x87, 0x38, 0x7e, + 0x51, 0xb5, 0x6a, 0x13, 0xb4, 0x5e, 0xd1, 0x92, 0xc3, 0xe8, 0x16, 0xbc, 0x2a, 0x1b, 0xd2, 0x17, + 0x42, 0xd9, 0x3e, 0xb4, 0x49, 0xed, 0x42, 0x13, 0xb4, 0x2e, 0x69, 0xe3, 0x13, 0xf8, 0x07, 0x00, + 0x1b, 0x59, 0xab, 0x73, 0x6d, 0x6e, 0xb9, 0x04, 0xb9, 0xf0, 0x4a, 0x3f, 0x36, 0x23, 0x56, 0xfa, + 0xf2, 0xca, 0xbb, 0x4a, 0x2e, 0xf1, 0x2b, 0xf1, 0x34, 0x77, 0x2e, 0x3c, 0x7d, 0xbe, 0x38, 0xa7, + 0x25, 0x52, 0x60, 0x53, 0x42, 0xeb, 0x30, 0x96, 0x0e, 0xed, 0x3d, 0x08, 0x47, 0x9b, 0x41, 0xae, + 0xe8, 0xa6, 0x12, 0xec, 0x1c, 0xc5, 0xdf, 0x39, 0x4a, 0xb0, 0x4f, 0xe5, 0xce, 0x51, 0x36, 0x75, + 0x93, 0xc8, 0x67, 0xb5, 0xc8, 0x93, 0xf8, 0x79, 0x08, 0x20, 0x25, 0xd3, 0x04, 0x00, 0xd5, 0x19, + 0x03, 0x40, 0xeb, 0xb1, 0xfa, 0x2a, 0xa2, 0xbe, 0xa5, 0xa9, 0xf5, 0x05, 0x2b, 0x8e, 0x15, 0x18, + 0xd3, 0x5f, 0xb8, 0x53, 0xef, 0xf9, 0xdb, 0xe1, 0xdc, 0xe9, 0x2f, 0xb1, 0xba, 0x08, 0xfe, 0xd8, + 0x4c, 0x51, 0xfd, 0xc5, 0x82, 0x9d, 0xe2, 0x8f, 0x8d, 0xc6, 0xf4, 0x97, 0x0a, 0x6d, 0x26, 0xfa, + 0x3b, 0x3b, 0x80, 0xea, 0x8c, 0x01, 0x94, 0xa7, 0xbf, 0xc7, 0x00, 0x5e, 0x0f, 0x3b, 0xac, 0x05, + 0x2f, 0xf7, 0x73, 0xa3, 0xbe, 0x6f, 0x00, 0xbc, 0x91, 0xbe, 0x36, 0x89, 0x7e, 0x0f, 0x5e, 0x76, + 0x22, 0xe3, 0xb2, 0xcf, 0xab, 0x39, 0xc1, 0x47, 0x53, 0x48, 0xec, 0xb1, 0xf0, 0x98, 0x48, 0x54, + 0x1d, 0xc6, 0xd2, 0x50, 0x95, 0xa5, 0xb9, 0xdf, 0xc2, 0xb2, 0xc7, 0xf2, 0x64, 0x96, 0x5d, 0x9d, + 0x61, 0xd9, 0xe5, 0x69, 0xed, 0x09, 0x80, 0x8b, 0x61, 0x3f, 0x37, 0x47, 0xff, 0xfe, 0xe7, 0x46, + 0x6f, 0x4f, 0x00, 0x6c, 0x66, 0xaf, 0x4f, 0xc2, 0x3f, 0x84, 0x0b, 0x76, 0x62, 0x4e, 0xf6, 
0x7a, + 0x3d, 0x67, 0x03, 0x92, 0xa9, 0x64, 0x13, 0xc6, 0xd2, 0x60, 0x2a, 0xf1, 0x75, 0x18, 0xcb, 0xc2, + 0x57, 0x96, 0x06, 0xff, 0x0a, 0x51, 0xa4, 0xe6, 0x9a, 0x88, 0xa2, 0xfa, 0x3f, 0xa0, 0x28, 0x4f, + 0x93, 0x5b, 0x70, 0x29, 0x6c, 0xf9, 0x9a, 0x70, 0x90, 0x5b, 0x52, 0x42, 0x23, 0x1b, 0x10, 0xb2, + 0x6d, 0xc1, 0xf9, 0xfe, 0xda, 0xea, 0xa9, 0x6a, 0xc8, 0x46, 0x20, 0xd3, 0x4b, 0x5a, 0x72, 0x18, + 0xff, 0x02, 0x60, 0x6b, 0x7a, 0x54, 0x49, 0xf1, 0x11, 0x80, 0x35, 0x23, 0xe3, 0x26, 0xd9, 0xc1, + 0xf7, 0x73, 0xe2, 0xcc, 0xca, 0x2d, 0xb1, 0x66, 0xa6, 0xc5, 0xfb, 0x92, 0x4a, 0x87, 0xb1, 0x69, + 0x54, 0xca, 0x52, 0xdc, 0xd7, 0x15, 0xc9, 0x6c, 0x62, 0xce, 0xb3, 0x31, 0xab, 0xbe, 0x00, 0x66, + 0xa5, 0x49, 0x72, 0x65, 0x78, 0x0d, 0x5e, 0x14, 0x24, 0xd0, 0x8f, 0x15, 0x78, 0x25, 0x6e, 0x47, + 0xd1, 0x76, 0xce, 0xb2, 0x26, 0x9e, 0x71, 0xea, 0x1f, 0x94, 0x1c, 0x35, 0xa8, 0x0a, 0x7f, 0xfc, + 0xd5, 0xaf, 0x7f, 0x7e, 0x5f, 0xd9, 0x42, 0xf7, 0x55, 0xa3, 0xcf, 0xd2, 0x0f, 0xb8, 0x6d, 0xff, + 0x80, 0xab, 0x1e, 0x0d, 0xa8, 0x71, 0xac, 0x1e, 0xd9, 0xe2, 0x37, 0xf1, 0x12, 0x3f, 0x56, 0x8f, + 0xc6, 0x5e, 0xd5, 0xc7, 0xe8, 0x6f, 0x00, 0xaf, 0xc6, 0xb3, 0x76, 0x18, 0x2b, 0x46, 0x27, 0xeb, + 0x30, 0x53, 0x8c, 0x4e, 0xe6, 0xc1, 0x05, 0x2f, 0x09, 0x3a, 0xaf, 0xa3, 0xc5, 0x29, 0x74, 0xd0, + 0x63, 0x5f, 0x06, 0x71, 0xff, 0x57, 0x58, 0x06, 0x69, 0xae, 0xb9, 0xb8, 0x0c, 0x52, 0x1d, 0x32, + 0x7e, 0x20, 0x0a, 0xdd, 0x46, 0xda, 0x58, 0xa1, 0xe1, 0xfd, 0x6d, 0xf1, 0x01, 0xc0, 0xcd, 0xa1, + 0x83, 0x7f, 0x7c, 0x1d, 0xc4, 0xd2, 0x96, 0xa2, 0x83, 0xf2, 0xf1, 0x64, 0x1e, 0x20, 0x70, 0x4b, + 0xe0, 0xc1, 0xa8, 0x39, 0x0d, 0x0f, 0x7a, 0x54, 0x81, 0x97, 0xa3, 0x76, 0x0d, 0x69, 0x05, 0x1b, + 0x96, 0x62, 0x63, 0xeb, 0x5b, 0xa5, 0xc6, 0x94, 0x35, 0x7e, 0x24, 0x6a, 0xd4, 0xd0, 0x66, 0xb2, + 0x46, 0xe9, 0x34, 0xf3, 0x0b, 0x60, 0x08, 0xe0, 0x7c, 0x34, 0xa5, 0xdf, 0x7e, 0xad, 0x60, 0xa3, + 0x4a, 0xc7, 0x92, 0xe1, 0xe4, 0xf1, 0x4d, 0x81, 0xa5, 0x89, 0x1a, 0x93, 0xb1, 0xa0, 0x9f, 0x2b, + 0x70, 0x21, 0xe9, 0x8d, 0xd0, 
0x4e, 0xc1, 0x46, 0x65, 0x78, 0xc8, 0xfa, 0x87, 0xa5, 0xc7, 0x95, + 0xd5, 0x7e, 0x22, 0xaa, 0xdd, 0x41, 0xdb, 0xc9, 0x6a, 0x23, 0xf6, 0x2e, 0xbf, 0x10, 0xfe, 0x05, + 0xf0, 0xd5, 0x64, 0x6a, 0x5f, 0x0c, 0x3b, 0x05, 0x1b, 0x37, 0x13, 0x4c, 0x13, 0x6c, 0x35, 0x5e, + 0x16, 0x98, 0xde, 0x40, 0x78, 0x3a, 0x26, 0x5f, 0x18, 0xb5, 0x2c, 0xc7, 0x82, 0x1e, 0x16, 0x6c, + 0xe4, 0x14, 0xeb, 0x57, 0xef, 0xce, 0x2c, 0xbe, 0x24, 0xb1, 0x26, 0x48, 0xbc, 0x83, 0xde, 0x4e, + 0x92, 0x08, 0x4c, 0x58, 0x3b, 0xd4, 0x45, 0x3b, 0x32, 0x75, 0x94, 0x30, 0xe3, 0xc7, 0xe8, 0xbb, + 0x0a, 0xbc, 0x9e, 0x95, 0xca, 0x17, 0xc8, 0xc3, 0x82, 0x8d, 0x9c, 0x29, 0xa6, 0x33, 0xb8, 0x61, + 0xbc, 0x22, 0x30, 0xdd, 0x42, 0xcb, 0x67, 0xc7, 0x74, 0x87, 0x3c, 0x1d, 0x36, 0xc0, 0xb3, 0x61, + 0x03, 0xfc, 0x31, 0x6c, 0x80, 0x6f, 0x4f, 0x1a, 0x73, 0xcf, 0x4e, 0x1a, 0x73, 0xbf, 0x9f, 0x34, + 0xe6, 0x1e, 0xdc, 0x35, 0xa9, 0xf7, 0xe9, 0x41, 0xcf, 0x5f, 0x89, 0x1a, 0x2c, 0xbc, 0x9d, 0xf6, + 0x2d, 0x3e, 0x12, 0xac, 0x2d, 0xbf, 0xc6, 0x7f, 0x11, 0xcd, 0xed, 0x1d, 0xda, 0xc4, 0xed, 0xbd, + 0x24, 0xbe, 0xb2, 0xbf, 0xf9, 0x5f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xa9, 0x8c, 0x65, 0x79, 0xcd, + 0x19, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. 
@@ -1575,7 +1576,7 @@ var _Query_serviceDesc = grpc.ServiceDesc{ }, }, Streams: []grpc.StreamDesc{}, - Metadata: "compliance/query.proto", + Metadata: "zigbeealliance/distributedcomplianceledger/compliance/query.proto", } func (m *QueryGetComplianceInfoRequest) Marshal() (dAtA []byte, err error) { @@ -3072,7 +3073,7 @@ func (m *QueryAllComplianceInfoResponse) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.ComplianceInfo = append(m.ComplianceInfo, dclcompltypes.ComplianceInfo{}) + m.ComplianceInfo = append(m.ComplianceInfo, ComplianceInfo{}) if err := m.ComplianceInfo[len(m.ComplianceInfo)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } diff --git a/x/compliance/types/query.pb.gw.go b/x/compliance/types/query.pb.gw.go index a0569d9c5..61e304bc2 100644 --- a/x/compliance/types/query.pb.gw.go +++ b/x/compliance/types/query.pb.gw.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. -// source: compliance/query.proto +// source: zigbeealliance/distributedcomplianceledger/compliance/query.proto /* Package types is a reverse proxy. 
@@ -1228,25 +1228,25 @@ func RegisterQueryHandlerClient(ctx context.Context, mux *runtime.ServeMux, clie } var ( - pattern_Query_ComplianceInfo_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"dcl", "compliance", "compliance-info", "vid", "pid", "softwareVersion", "certificationType"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_ComplianceInfo_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"dcl", "compliance", "compliance-info", "vid", "pid", "softwareVersion", "certificationType"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_ComplianceInfoAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "compliance", "compliance-info"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_ComplianceInfoAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "compliance", "compliance-info"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_CertifiedModel_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"dcl", "compliance", "certified-models", "vid", "pid", "softwareVersion", "certificationType"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_CertifiedModel_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"dcl", "compliance", "certified-models", "vid", "pid", "softwareVersion", "certificationType"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_CertifiedModelAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "compliance", "certified-models"}, "", runtime.AssumeColonVerbOpt(true))) + 
pattern_Query_CertifiedModelAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "compliance", "certified-models"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_RevokedModel_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"dcl", "compliance", "revoked-models", "vid", "pid", "softwareVersion", "certificationType"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_RevokedModel_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"dcl", "compliance", "revoked-models", "vid", "pid", "softwareVersion", "certificationType"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_RevokedModelAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "compliance", "revoked-models"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_RevokedModelAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "compliance", "revoked-models"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_ProvisionalModel_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"dcl", "compliance", "provisional-models", "vid", "pid", "softwareVersion", "certificationType"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_ProvisionalModel_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"dcl", "compliance", "provisional-models", "vid", "pid", "softwareVersion", "certificationType"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_ProvisionalModelAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "compliance", 
"provisional-models"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_ProvisionalModelAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "compliance", "provisional-models"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_DeviceSoftwareCompliance_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "compliance", "device-software-compliance", "cDCertificateId"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_DeviceSoftwareCompliance_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "compliance", "device-software-compliance", "cDCertificateId"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_DeviceSoftwareComplianceAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "compliance", "device-software-compliance"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_DeviceSoftwareComplianceAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "compliance", "device-software-compliance"}, "", runtime.AssumeColonVerbOpt(false))) ) var ( diff --git a/x/compliance/types/revoked_model.pb.go b/x/compliance/types/revoked_model.pb.go index e1e7e3c14..91b82a80e 100644 --- a/x/compliance/types/revoked_model.pb.go +++ b/x/compliance/types/revoked_model.pb.go @@ -1,11 +1,11 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: compliance/revoked_model.proto +// source: zigbeealliance/distributedcomplianceledger/compliance/revoked_model.proto package types import ( fmt "fmt" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -34,7 +34,7 @@ func (m *RevokedModel) Reset() { *m = RevokedModel{} } func (m *RevokedModel) String() string { return proto.CompactTextString(m) } func (*RevokedModel) ProtoMessage() {} func (*RevokedModel) Descriptor() ([]byte, []int) { - return fileDescriptor_6ab8669fdd49fd2f, []int{0} + return fileDescriptor_8ce0c072c0981ccd, []int{0} } func (m *RevokedModel) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -102,27 +102,29 @@ func init() { proto.RegisterType((*RevokedModel)(nil), "zigbeealliance.distributedcomplianceledger.compliance.RevokedModel") } -func init() { proto.RegisterFile("compliance/revoked_model.proto", fileDescriptor_6ab8669fdd49fd2f) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/compliance/revoked_model.proto", fileDescriptor_8ce0c072c0981ccd) +} -var fileDescriptor_6ab8669fdd49fd2f = []byte{ - // 271 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x64, 0x90, 0x31, 0x4e, 0xec, 0x30, - 0x10, 0x86, 0xe3, 0xb7, 0x2f, 0x08, 0x2c, 0x10, 0x10, 0x51, 0xa4, 0xb2, 0x22, 0xaa, 0x14, 0x24, - 0x29, 0x10, 0x17, 0xa0, 0x45, 0x34, 0x16, 0xa2, 0xa0, 0x41, 0x4e, 0x3c, 0x1b, 0x46, 0x78, 0x63, - 0xcb, 0x71, 0x02, 0xcb, 0x29, 0x38, 0x02, 0xc7, 0xa1, 0xdc, 0x92, 0x12, 0x25, 0x17, 0x41, 0xc1, - 0x42, 0x59, 0x41, 0x67, 0x7f, 0x33, 0xf3, 0x4b, 0xdf, 0x4f, 0x59, 0xa5, 0x57, 0x46, 0xa1, 0x68, - 0x2a, 0x28, 0x2c, 0xf4, 0xfa, 0x11, 0xe4, 0xfd, 0x4a, 0x4b, 0x50, 0xb9, 0xb1, 0xda, 0xe9, 0xe8, - 0xe2, 0x05, 0xeb, 0x12, 0x40, 0x28, 0xbf, 0x93, 0x4b, 0x6c, 0x9d, 0xc5, 0xb2, 0x73, 0x20, 0xe7, - 0x4b, 0x05, 0xb2, 0x06, 0x9b, 0xcf, 0xe0, 0xf4, 0x8d, 0xd0, 0x7d, 0xee, 0xe3, 0xae, 0xa7, 0xb4, - 0xe8, 
0x88, 0x2e, 0x7a, 0x94, 0x31, 0x49, 0x48, 0x1a, 0xf2, 0xe9, 0x39, 0x11, 0x83, 0x32, 0xfe, - 0xe7, 0x89, 0x41, 0x19, 0xa5, 0xf4, 0xb0, 0xd5, 0x4b, 0xf7, 0x24, 0x2c, 0xdc, 0x82, 0x6d, 0x51, - 0x37, 0xf1, 0x22, 0x21, 0xe9, 0x01, 0xff, 0x8d, 0xa3, 0x33, 0x7a, 0x5c, 0x81, 0x75, 0xb8, 0xc4, - 0x4a, 0x38, 0xd4, 0xcd, 0xcd, 0xda, 0x40, 0xfc, 0x3f, 0x21, 0xe9, 0x1e, 0xff, 0x3b, 0x88, 0x4e, - 0x68, 0xd8, 0x0b, 0xd5, 0x41, 0x1c, 0x26, 0x24, 0xdd, 0xe5, 0xfe, 0x73, 0x09, 0xef, 0x03, 0x23, - 0x9b, 0x81, 0x91, 0xcf, 0x81, 0x91, 0xd7, 0x91, 0x05, 0x9b, 0x91, 0x05, 0x1f, 0x23, 0x0b, 0xee, - 0xae, 0x6a, 0x74, 0x0f, 0x5d, 0x39, 0xf9, 0x14, 0x5e, 0x3f, 0xfb, 0xf1, 0x2f, 0xb6, 0xfc, 0xb3, - 0xd9, 0x37, 0xf3, 0x0d, 0x14, 0xcf, 0xc5, 0x56, 0x9d, 0x6e, 0x6d, 0xa0, 0x2d, 0x77, 0xbe, 0x7b, - 0x3c, 0xff, 0x0a, 0x00, 0x00, 0xff, 0xff, 0x37, 0xea, 0x5c, 0xd9, 0x69, 0x01, 0x00, 0x00, +var fileDescriptor_8ce0c072c0981ccd = []byte{ + // 272 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xbf, 0x4a, 0xc5, 0x30, + 0x14, 0x87, 0x1b, 0xaf, 0x15, 0x0d, 0x8a, 0x5a, 0x1c, 0x3a, 0x85, 0xe2, 0xd4, 0xc1, 0xb6, 0x83, + 0xf8, 0x02, 0x6e, 0x22, 0x2e, 0x45, 0x1c, 0x5c, 0x24, 0x6d, 0xce, 0xad, 0x07, 0x73, 0x9b, 0x90, + 0xa6, 0xd5, 0xeb, 0x53, 0xf8, 0x08, 0x3e, 0x8e, 0xe3, 0x1d, 0x1d, 0xa5, 0x7d, 0x11, 0xa9, 0x11, + 0xea, 0x3f, 0xdc, 0x92, 0xef, 0xc0, 0x07, 0xbf, 0x8f, 0x9e, 0x3d, 0x62, 0x55, 0x00, 0x70, 0x29, + 0x91, 0xd7, 0x25, 0x64, 0x02, 0x1b, 0x6b, 0xb0, 0x68, 0x2d, 0x88, 0x52, 0x2d, 0xb4, 0xa3, 0x12, + 0x44, 0x05, 0x26, 0x9b, 0x40, 0x66, 0xa0, 0x53, 0x77, 0x20, 0x6e, 0x16, 0x4a, 0x80, 0x4c, 0xb5, + 0x51, 0x56, 0x05, 0x27, 0xdf, 0x55, 0xe9, 0x3f, 0xaa, 0x74, 0x02, 0x87, 0xcf, 0x84, 0x6e, 0xe7, + 0x4e, 0x77, 0x31, 0xda, 0x82, 0x3d, 0x3a, 0xeb, 0x50, 0x84, 0x24, 0x22, 0xb1, 0x9f, 0x8f, 0xcf, + 0x91, 0x68, 0x14, 0xe1, 0x9a, 0x23, 0x1a, 0x45, 0x10, 0xd3, 0xdd, 0x46, 0xcd, 0xed, 0x3d, 0x37, + 0x70, 0x05, 0xa6, 0x41, 0x55, 0x87, 0xb3, 0x88, 0xc4, 
0x3b, 0xf9, 0x4f, 0x1c, 0x1c, 0xd1, 0xfd, + 0x12, 0x8c, 0xc5, 0x39, 0x96, 0xdc, 0xa2, 0xaa, 0x2f, 0x97, 0x1a, 0xc2, 0xf5, 0x88, 0xc4, 0x5b, + 0xf9, 0xef, 0x43, 0x70, 0x40, 0xfd, 0x8e, 0xcb, 0x16, 0x42, 0x3f, 0x22, 0xf1, 0x66, 0xee, 0x3e, + 0xa7, 0xf0, 0xd2, 0x33, 0xb2, 0xea, 0x19, 0x79, 0xeb, 0x19, 0x79, 0x1a, 0x98, 0xb7, 0x1a, 0x98, + 0xf7, 0x3a, 0x30, 0xef, 0xfa, 0xbc, 0x42, 0x7b, 0xdb, 0x16, 0xe3, 0x9e, 0xcc, 0xcd, 0x4f, 0xfe, + 0x4a, 0x99, 0x4c, 0x7b, 0x93, 0xcf, 0x98, 0x0f, 0x5f, 0x73, 0xda, 0xa5, 0x86, 0xa6, 0xd8, 0xf8, + 0xe8, 0x78, 0xfc, 0x1e, 0x00, 0x00, 0xff, 0xff, 0x06, 0xe1, 0x80, 0xfc, 0x94, 0x01, 0x00, 0x00, } func (m *RevokedModel) Marshal() (dAtA []byte, err error) { diff --git a/x/compliance/types/tx.pb.go b/x/compliance/types/tx.pb.go index 2c5b383db..774a25ecf 100644 --- a/x/compliance/types/tx.pb.go +++ b/x/compliance/types/tx.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: compliance/tx.proto +// source: zigbeealliance/distributedcomplianceledger/compliance/tx.proto package types @@ -7,9 +7,9 @@ import ( context "context" fmt "fmt" _ "github.com/cosmos/cosmos-proto" - _ "github.com/gogo/protobuf/gogoproto" - grpc1 "github.com/gogo/protobuf/grpc" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + grpc1 "github.com/cosmos/gogoproto/grpc" + proto "github.com/cosmos/gogoproto/proto" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" status "google.golang.org/grpc/status" @@ -58,7 +58,7 @@ func (m *MsgCertifyModel) Reset() { *m = MsgCertifyModel{} } func (m *MsgCertifyModel) String() string { return proto.CompactTextString(m) } func (*MsgCertifyModel) ProtoMessage() {} func (*MsgCertifyModel) Descriptor() ([]byte, []int) { - return fileDescriptor_db4a0d801b7ae7cc, []int{0} + return fileDescriptor_631265d620e99057, []int{0} } func (m *MsgCertifyModel) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -248,7 +248,7 @@ func (m *MsgCertifyModelResponse) 
Reset() { *m = MsgCertifyModelResponse func (m *MsgCertifyModelResponse) String() string { return proto.CompactTextString(m) } func (*MsgCertifyModelResponse) ProtoMessage() {} func (*MsgCertifyModelResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_db4a0d801b7ae7cc, []int{1} + return fileDescriptor_631265d620e99057, []int{1} } func (m *MsgCertifyModelResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -294,7 +294,7 @@ func (m *MsgRevokeModel) Reset() { *m = MsgRevokeModel{} } func (m *MsgRevokeModel) String() string { return proto.CompactTextString(m) } func (*MsgRevokeModel) ProtoMessage() {} func (*MsgRevokeModel) Descriptor() ([]byte, []int) { - return fileDescriptor_db4a0d801b7ae7cc, []int{2} + return fileDescriptor_631265d620e99057, []int{2} } func (m *MsgRevokeModel) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -400,7 +400,7 @@ func (m *MsgRevokeModelResponse) Reset() { *m = MsgRevokeModelResponse{} func (m *MsgRevokeModelResponse) String() string { return proto.CompactTextString(m) } func (*MsgRevokeModelResponse) ProtoMessage() {} func (*MsgRevokeModelResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_db4a0d801b7ae7cc, []int{3} + return fileDescriptor_631265d620e99057, []int{3} } func (m *MsgRevokeModelResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -458,7 +458,7 @@ func (m *MsgProvisionModel) Reset() { *m = MsgProvisionModel{} } func (m *MsgProvisionModel) String() string { return proto.CompactTextString(m) } func (*MsgProvisionModel) ProtoMessage() {} func (*MsgProvisionModel) Descriptor() ([]byte, []int) { - return fileDescriptor_db4a0d801b7ae7cc, []int{4} + return fileDescriptor_631265d620e99057, []int{4} } func (m *MsgProvisionModel) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -648,7 +648,7 @@ func (m *MsgProvisionModelResponse) Reset() { *m = MsgProvisionModelResp func (m *MsgProvisionModelResponse) String() string { return proto.CompactTextString(m) } func 
(*MsgProvisionModelResponse) ProtoMessage() {} func (*MsgProvisionModelResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_db4a0d801b7ae7cc, []int{5} + return fileDescriptor_631265d620e99057, []int{5} } func (m *MsgProvisionModelResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -706,7 +706,7 @@ func (m *MsgUpdateComplianceInfo) Reset() { *m = MsgUpdateComplianceInfo func (m *MsgUpdateComplianceInfo) String() string { return proto.CompactTextString(m) } func (*MsgUpdateComplianceInfo) ProtoMessage() {} func (*MsgUpdateComplianceInfo) Descriptor() ([]byte, []int) { - return fileDescriptor_db4a0d801b7ae7cc, []int{6} + return fileDescriptor_631265d620e99057, []int{6} } func (m *MsgUpdateComplianceInfo) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -896,7 +896,7 @@ func (m *MsgUpdateComplianceInfoResponse) Reset() { *m = MsgUpdateCompli func (m *MsgUpdateComplianceInfoResponse) String() string { return proto.CompactTextString(m) } func (*MsgUpdateComplianceInfoResponse) ProtoMessage() {} func (*MsgUpdateComplianceInfoResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_db4a0d801b7ae7cc, []int{7} + return fileDescriptor_631265d620e99057, []int{7} } func (m *MsgUpdateComplianceInfoResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -937,7 +937,7 @@ func (m *MsgDeleteComplianceInfo) Reset() { *m = MsgDeleteComplianceInfo func (m *MsgDeleteComplianceInfo) String() string { return proto.CompactTextString(m) } func (*MsgDeleteComplianceInfo) ProtoMessage() {} func (*MsgDeleteComplianceInfo) Descriptor() ([]byte, []int) { - return fileDescriptor_db4a0d801b7ae7cc, []int{8} + return fileDescriptor_631265d620e99057, []int{8} } func (m *MsgDeleteComplianceInfo) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1008,7 +1008,7 @@ func (m *MsgDeleteComplianceInfoResponse) Reset() { *m = MsgDeleteCompli func (m *MsgDeleteComplianceInfoResponse) String() string { return proto.CompactTextString(m) } func 
(*MsgDeleteComplianceInfoResponse) ProtoMessage() {} func (*MsgDeleteComplianceInfoResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_db4a0d801b7ae7cc, []int{9} + return fileDescriptor_631265d620e99057, []int{9} } func (m *MsgDeleteComplianceInfoResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1050,82 +1050,85 @@ func init() { proto.RegisterType((*MsgDeleteComplianceInfoResponse)(nil), "zigbeealliance.distributedcomplianceledger.compliance.MsgDeleteComplianceInfoResponse") } -func init() { proto.RegisterFile("compliance/tx.proto", fileDescriptor_db4a0d801b7ae7cc) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/compliance/tx.proto", fileDescriptor_631265d620e99057) +} -var fileDescriptor_db4a0d801b7ae7cc = []byte{ - // 1151 bytes of a gzipped FileDescriptorProto +var fileDescriptor_631265d620e99057 = []byte{ + // 1154 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x99, 0xcf, 0x4f, 0x1b, 0x47, - 0x14, 0xc7, 0xb3, 0x21, 0x40, 0x18, 0xf3, 0x23, 0x1e, 0x20, 0x19, 0x1c, 0xc9, 0xa6, 0xee, 0x85, - 0x43, 0xb0, 0x21, 0x84, 0x54, 0x6a, 0x84, 0x2a, 0x30, 0xb4, 0xb1, 0x52, 0x07, 0x6a, 0x42, 0x0e, - 0xed, 0xa1, 0x5a, 0xbc, 0xcf, 0xcb, 0xaa, 0xbb, 0x3b, 0xdb, 0x99, 0x31, 0x81, 0xde, 0x7b, 0xcf, - 0xff, 0x50, 0xa9, 0xa7, 0x1e, 0x7b, 0xae, 0xd4, 0x5b, 0xd4, 0x53, 0xd4, 0x53, 0x4f, 0x56, 0x05, - 0xff, 0x81, 0xd5, 0x3f, 0xa0, 0xda, 0x5d, 0xef, 0x7a, 0x77, 0xbd, 0x5e, 0xb6, 0x06, 0x35, 0x91, - 0xc2, 0xcd, 0x1e, 0xbf, 0xef, 0x77, 0xde, 0xcc, 0x1b, 0x7f, 0xde, 0x78, 0x8d, 0x66, 0x1b, 0xd4, - 0xb0, 0x74, 0x4d, 0x36, 0x1b, 0x50, 0x16, 0x27, 0x25, 0x8b, 0x51, 0x41, 0xf1, 0xfa, 0x0f, 0x9a, - 0x7a, 0x08, 0x20, 0xeb, 0xee, 0x07, 0x25, 0x45, 0xe3, 0x82, 0x69, 0x87, 0x2d, 0x01, 0x4a, 0x2f, - 0x5c, 0x07, 0x45, 0x05, 0x56, 0xea, 0x0d, 0xe4, 0x16, 0x1a, 0x94, 0x1b, 0x94, 0x7f, 0xeb, 0x98, - 0x94, 0xdd, 0x37, 0xae, 0x63, 0x6e, 0x4e, 0xa5, 0x2a, 0x75, 0xc7, 0xed, 0x57, 0xee, 
0x68, 0xf1, - 0x8f, 0x0c, 0x9a, 0xa9, 0x71, 0xb5, 0x02, 0x4c, 0x68, 0xcd, 0xd3, 0x1a, 0x55, 0x40, 0xc7, 0x5f, - 0xa0, 0x31, 0xae, 0xa9, 0x26, 0x30, 0x22, 0x2d, 0x4a, 0x4b, 0x13, 0x5b, 0xe5, 0x4e, 0xbb, 0x30, - 0x7b, 0x2c, 0xeb, 0x9a, 0x22, 0x0b, 0xf8, 0xb4, 0xc8, 0xe0, 0xfb, 0x96, 0xc6, 0x40, 0x29, 0xfe, - 0xf9, 0xeb, 0xf2, 0x5c, 0x77, 0x8a, 0x4d, 0x45, 0x61, 0xc0, 0xf9, 0xbe, 0x60, 0x9a, 0xa9, 0xd6, - 0xbb, 0x72, 0xbc, 0x82, 0x46, 0x8e, 0x35, 0x85, 0xdc, 0x5c, 0x94, 0x96, 0x46, 0xb7, 0xf2, 0x9d, - 0x76, 0x21, 0xd7, 0x73, 0x51, 0x05, 0x6c, 0xac, 0x3e, 0xd0, 0x05, 0x6c, 0x3c, 0x5e, 0x5f, 0x5f, - 0x5b, 0x2f, 0xd6, 0xed, 0x50, 0x5b, 0x61, 0x69, 0x0a, 0x19, 0x49, 0xa7, 0xb0, 0x34, 0x05, 0x2f, - 0xa1, 0x19, 0x4e, 0x9b, 0xe2, 0x95, 0xcc, 0xe0, 0x25, 0x30, 0xae, 0x51, 0x93, 0xdc, 0x5a, 0x94, - 0x96, 0xa6, 0xea, 0xd1, 0x61, 0xfc, 0x02, 0xcd, 0x47, 0x86, 0xdc, 0x74, 0xc9, 0xa8, 0xb3, 0xca, - 0xc8, 0x6c, 0xde, 0x2a, 0x1f, 0x18, 0xf2, 0xc9, 0xc6, 0xe3, 0x47, 0xc5, 0x7a, 0xbc, 0x18, 0x3f, - 0x45, 0x33, 0x8d, 0xed, 0xee, 0xd0, 0xf3, 0x96, 0x71, 0x08, 0x8c, 0x8c, 0xd9, 0xf3, 0xc7, 0x65, - 0xbf, 0x12, 0xcc, 0x3e, 0x2a, 0xc3, 0x3b, 0x28, 0xdb, 0x70, 0xca, 0xa0, 0x35, 0x64, 0xa1, 0x51, - 0x73, 0x5b, 0x16, 0x40, 0xc6, 0x9d, 0xdc, 0xee, 0x0d, 0xa8, 0x40, 0xbd, 0x5f, 0x81, 0x6b, 0x11, - 0x9b, 0x17, 0xa7, 0x16, 0x90, 0xdb, 0x8e, 0x4d, 0xa1, 0xd3, 0x2e, 0xdc, 0x1f, 0xb0, 0xc4, 0xd5, - 0x95, 0x95, 0xa8, 0x9d, 0xad, 0xc4, 0xab, 0x68, 0x8c, 0x81, 0xcc, 0xa9, 0x49, 0x26, 0x1c, 0x8f, - 0x85, 0x4e, 0xbb, 0x30, 0xdf, 0xf3, 0x70, 0xa5, 0x0f, 0x1f, 0xd9, 0xea, 0x6e, 0x20, 0xde, 0x41, - 0xd8, 0x62, 0x54, 0x65, 0xb2, 0x61, 0x3b, 0x78, 0x55, 0x41, 0x8e, 0x7c, 0xbe, 0xd3, 0x2e, 0x64, - 0xc3, 0x72, 0x7b, 0x73, 0x63, 0x04, 0xee, 0xce, 0x56, 0xfc, 0x84, 0xa0, 0xaa, 0x90, 0x4c, 0xaa, - 0x4a, 0x45, 0x65, 0x78, 0x15, 0xdd, 0x6e, 0xca, 0x86, 0xa6, 0x9f, 0x56, 0x15, 0x32, 0x99, 0x94, - 0x86, 0x1f, 0x86, 0x2b, 0x28, 0xcb, 0x5b, 0x96, 0x45, 0x99, 0x00, 0xa5, 0xa2, 0xb7, 0xb8, 0x00, - 0xc6, 0xc9, 0x54, 0x92, 
0xb6, 0x3f, 0x1e, 0x3f, 0x43, 0xf3, 0xde, 0x77, 0x53, 0xec, 0xe9, 0xb2, - 0x68, 0x52, 0x66, 0x1c, 0x70, 0x50, 0xc8, 0x74, 0x92, 0x51, 0xbc, 0x06, 0x7f, 0x85, 0x48, 0xdf, - 0x07, 0xde, 0xde, 0xce, 0x24, 0xf9, 0x0d, 0x94, 0xe1, 0x35, 0x34, 0xb1, 0xbb, 0xef, 0x79, 0xdc, - 0x49, 0xf2, 0xe8, 0xc5, 0xd9, 0xd5, 0x0d, 0x9d, 0x92, 0x3a, 0x6d, 0x09, 0x20, 0xd9, 0xc4, 0xea, - 0xf6, 0x0b, 0xf0, 0x27, 0x28, 0x13, 0xa8, 0x39, 0xc1, 0x49, 0xfa, 0x60, 0xa4, 0x9d, 0xb4, 0x60, - 0xb2, 0xc9, 0xed, 0xbd, 0x26, 0xb3, 0x89, 0x49, 0xfb, 0x71, 0xce, 0x6c, 0x32, 0x03, 0x53, 0x54, - 0x8e, 0x34, 0x5d, 0x21, 0x73, 0xc9, 0xb3, 0xf5, 0x22, 0x31, 0xa0, 0x62, 0x28, 0xf9, 0xaa, 0xb2, - 0xdb, 0xdc, 0xef, 0x82, 0xa0, 0x42, 0x0d, 0x8b, 0x9a, 0x60, 0x0a, 0x32, 0x9f, 0xe4, 0x97, 0xc2, - 0x00, 0x6f, 0xa3, 0x29, 0xde, 0x38, 0x02, 0x43, 0xf6, 0xaa, 0x71, 0x37, 0x15, 0x43, 0xc2, 0xa2, - 0xe2, 0x02, 0xba, 0x17, 0x61, 0x79, 0x1d, 0xb8, 0x45, 0x4d, 0x0e, 0xc5, 0xd7, 0xa3, 0x68, 0xba, - 0xc6, 0xd5, 0x3a, 0x1c, 0xd3, 0xef, 0xe0, 0x1a, 0xf3, 0xef, 0x10, 0xf3, 0x9f, 0xa1, 0x69, 0x06, - 0xc7, 0x34, 0x3d, 0xe3, 0x23, 0xe1, 0xef, 0x01, 0xe0, 0xfb, 0x4e, 0x2b, 0x1a, 0xe6, 0xb4, 0x12, - 0x74, 0x37, 0x7c, 0x22, 0xfd, 0xc3, 0xfa, 0x26, 0x83, 0xb2, 0x35, 0xae, 0xee, 0x31, 0x7a, 0xac, - 0xd9, 0xa1, 0xd7, 0xe7, 0xf5, 0x1d, 0x9e, 0xd7, 0x4d, 0x34, 0x63, 0x79, 0x85, 0x90, 0xf5, 0x34, - 0x07, 0x36, 0x1a, 0x7f, 0x7d, 0x25, 0xb9, 0xbe, 0x92, 0x5c, 0x5f, 0x49, 0x3e, 0xcc, 0x2b, 0xc9, - 0x7d, 0xb4, 0xd0, 0x47, 0x72, 0x9f, 0xf3, 0xff, 0x64, 0x9c, 0x0b, 0xcb, 0x81, 0x65, 0x7b, 0x55, - 0xfc, 0x5f, 0xb1, 0x55, 0xb3, 0x49, 0x71, 0x15, 0x8d, 0x37, 0x18, 0xc8, 0x82, 0x0e, 0x8d, 0x7b, - 0x4f, 0xff, 0x9e, 0xf1, 0xfe, 0xcb, 0x38, 0x18, 0xa6, 0x63, 0x7d, 0x0c, 0x0b, 0xab, 0xf1, 0x9c, - 0xef, 0x03, 0x2b, 0x35, 0x34, 0x01, 0x86, 0x25, 0x4e, 0x83, 0x48, 0x0a, 0x83, 0x1e, 0xa3, 0x5b, - 0x8a, 0x4f, 0xf7, 0xba, 0xf3, 0x1a, 0x3f, 0xf1, 0x51, 0xeb, 0xe2, 0xfa, 0xe3, 0x4e, 0xbb, 0x50, - 0x18, 0xe4, 0x1a, 0x85, 0x6e, 0x09, 0x8d, 0xd2, 0x57, 0x76, 
0xbf, 0x76, 0x31, 0x4d, 0x06, 0x56, - 0xca, 0x0d, 0x73, 0xd7, 0x12, 0xa6, 0x2b, 0x4a, 0xbd, 0x96, 0x30, 0x5e, 0x77, 0x63, 0x89, 0x90, - 0x49, 0xe7, 0x16, 0xc7, 0x86, 0xcd, 0x30, 0x1b, 0x26, 0xd3, 0x39, 0x85, 0x28, 0xb1, 0x1b, 0xdb, - 0x83, 0xa6, 0x52, 0xe6, 0x14, 0xd3, 0x8d, 0x0e, 0x92, 0x59, 0x7e, 0xa1, 0xe7, 0x00, 0xaa, 0x7f, - 0x73, 0x21, 0xd5, 0x2f, 0x74, 0x1e, 0xcc, 0xf7, 0x8d, 0x20, 0x2a, 0xef, 0xa4, 0x73, 0x0b, 0x40, - 0xf3, 0x49, 0xa0, 0x6d, 0x66, 0xd3, 0xa9, 0x7b, 0x0d, 0xb4, 0x16, 0xd7, 0x40, 0x71, 0x3a, 0x97, - 0x98, 0x56, 0xba, 0x11, 0x6c, 0x55, 0xb3, 0x29, 0x97, 0xd2, 0x6b, 0x5a, 0x9b, 0x71, 0xfc, 0x4f, - 0x71, 0xa2, 0x02, 0x9d, 0x80, 0xfe, 0x87, 0x4e, 0x70, 0xa1, 0xf3, 0xff, 0xd7, 0x13, 0x3e, 0x42, - 0x85, 0x01, 0xd4, 0xf7, 0x3b, 0xc3, 0xef, 0x37, 0x9d, 0xce, 0xb0, 0x0d, 0x3a, 0x7c, 0x38, 0x9d, - 0xa1, 0x36, 0xb8, 0x33, 0x0c, 0x71, 0x4d, 0xee, 0x6e, 0x73, 0xdc, 0x16, 0x7a, 0xdb, 0xfc, 0xf0, - 0xc7, 0x71, 0x34, 0x52, 0xe3, 0x2a, 0xfe, 0x59, 0x42, 0x93, 0xa1, 0x47, 0xc0, 0x9f, 0x97, 0x86, - 0x7a, 0xfe, 0x5c, 0x8a, 0x3c, 0x7e, 0xc8, 0x3d, 0xbf, 0x1a, 0x1f, 0x2f, 0x61, 0xfc, 0x93, 0x84, - 0x32, 0xc1, 0x67, 0x18, 0x3b, 0xc3, 0xfb, 0x07, 0x6c, 0x72, 0xb5, 0x2b, 0xb1, 0xf1, 0xb3, 0xfc, - 0x45, 0x42, 0xd3, 0x91, 0x1f, 0xaf, 0x4f, 0x87, 0x9f, 0x21, 0xec, 0x94, 0xdb, 0xbb, 0x2a, 0x27, - 0x3f, 0xdd, 0xdf, 0x24, 0x34, 0x17, 0x7b, 0x07, 0xbb, 0x44, 0xf5, 0xe2, 0xfc, 0x72, 0x2f, 0xaf, - 0xd6, 0x2f, 0xb4, 0x80, 0x58, 0x54, 0x5c, 0x62, 0x01, 0x71, 0x7e, 0x97, 0x59, 0x40, 0xd2, 0xf7, - 0x70, 0x0b, 0xde, 0x9c, 0xe5, 0xa5, 0xb7, 0x67, 0x79, 0xe9, 0xef, 0xb3, 0xbc, 0xf4, 0xfa, 0x3c, - 0x7f, 0xe3, 0xed, 0x79, 0xfe, 0xc6, 0x5f, 0xe7, 0xf9, 0x1b, 0x5f, 0x3f, 0x53, 0x35, 0x71, 0xd4, - 0x3a, 0xb4, 0xcd, 0xca, 0xee, 0xdc, 0xcb, 0xde, 0xe4, 0xe5, 0xc0, 0xe4, 0xcb, 0xbd, 0xc9, 0x96, - 0xdd, 0xe9, 0xcb, 0x27, 0xe5, 0xe0, 0xff, 0x4a, 0xa7, 0x16, 0xf0, 0xc3, 0x31, 0xe7, 0x3f, 0x9f, - 0xb5, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0x16, 0xf8, 0x33, 0x9c, 0x72, 0x1a, 0x00, 0x00, + 0x14, 
0xc7, 0xd9, 0x10, 0x20, 0x8c, 0xf9, 0x11, 0x0f, 0x90, 0x0c, 0x8e, 0x64, 0x53, 0xf7, 0xc2, + 0x21, 0xd8, 0x10, 0x42, 0x2a, 0x35, 0xa2, 0x15, 0x18, 0xda, 0x58, 0xa9, 0x03, 0x35, 0x21, 0x87, + 0xf6, 0x50, 0x2d, 0xde, 0xe7, 0x65, 0xd5, 0xdd, 0x9d, 0xed, 0xcc, 0x98, 0x40, 0xef, 0xbd, 0xe7, + 0x7f, 0xa8, 0xd4, 0x53, 0x8f, 0x3d, 0x57, 0xea, 0x2d, 0xea, 0x29, 0xea, 0xa9, 0x27, 0xab, 0x82, + 0xff, 0xc0, 0xea, 0x1f, 0x50, 0xed, 0xae, 0xbd, 0xde, 0x5d, 0xaf, 0x97, 0xad, 0x41, 0x49, 0xa4, + 0xf8, 0x66, 0x86, 0xf7, 0xfd, 0xce, 0x9b, 0x79, 0xe3, 0xcf, 0x1b, 0xef, 0xa2, 0xcf, 0x7e, 0xd4, + 0xd4, 0x23, 0x00, 0x59, 0xd7, 0x35, 0xd9, 0xac, 0x41, 0x51, 0xd1, 0xb8, 0x60, 0xda, 0x51, 0x43, + 0x80, 0x52, 0xa3, 0x86, 0xe5, 0x8e, 0xea, 0xa0, 0xa8, 0xc0, 0x8a, 0xdd, 0x81, 0xa2, 0x38, 0x2d, + 0x58, 0x8c, 0x0a, 0x8a, 0x37, 0x82, 0xfa, 0x42, 0x8c, 0xbe, 0xd0, 0x1d, 0xc8, 0x2c, 0xd6, 0x28, + 0x37, 0x28, 0xff, 0xce, 0x31, 0x29, 0xba, 0x7f, 0xb8, 0x8e, 0x99, 0x79, 0x95, 0xaa, 0xd4, 0x1d, + 0xb7, 0x3f, 0xb9, 0xa3, 0xf9, 0x3f, 0x53, 0x68, 0xb6, 0xc2, 0xd5, 0x12, 0x30, 0xa1, 0xd5, 0xcf, + 0x2a, 0x54, 0x01, 0x1d, 0x7f, 0x89, 0xc6, 0xb9, 0xa6, 0x9a, 0xc0, 0x88, 0xb4, 0x24, 0x2d, 0x4f, + 0x6e, 0x17, 0x5b, 0xcd, 0xdc, 0xdc, 0x89, 0xac, 0x6b, 0x8a, 0x2c, 0xe0, 0xd3, 0x3c, 0x83, 0x1f, + 0x1a, 0x1a, 0x03, 0x25, 0xff, 0xd7, 0x6f, 0x2b, 0xf3, 0xed, 0x29, 0xb6, 0x14, 0x85, 0x01, 0xe7, + 0x07, 0x82, 0x69, 0xa6, 0x5a, 0x6d, 0xcb, 0xf1, 0x2a, 0x1a, 0x3d, 0xd1, 0x14, 0x72, 0x63, 0x49, + 0x5a, 0x1e, 0xdb, 0xce, 0xb6, 0x9a, 0xb9, 0x4c, 0xd7, 0x45, 0x15, 0xb0, 0xb9, 0x76, 0x5f, 0x17, + 0xb0, 0xf9, 0x68, 0x63, 0x63, 0x7d, 0x23, 0x5f, 0xb5, 0x43, 0x6d, 0x85, 0xa5, 0x29, 0x64, 0x34, + 0x99, 0xc2, 0xd2, 0x14, 0xbc, 0x8c, 0x66, 0x39, 0xad, 0x8b, 0x97, 0x32, 0x83, 0x17, 0xc0, 0xb8, + 0x46, 0x4d, 0x72, 0x73, 0x49, 0x5a, 0x9e, 0xae, 0x86, 0x87, 0xf1, 0x73, 0xb4, 0x10, 0x1a, 0x72, + 0xd3, 0x25, 0x63, 0xce, 0x2a, 0x43, 0xb3, 0x75, 0x56, 0x79, 0xdf, 0x90, 0x4f, 0x37, 0x1f, 0x3d, + 0xcc, 0x57, 0xa3, 0xc5, 0xf8, 0x09, 0x9a, 
0xad, 0xed, 0xb4, 0x87, 0x9e, 0x35, 0x8c, 0x23, 0x60, + 0x64, 0xdc, 0x9e, 0x3f, 0x2a, 0xfb, 0x55, 0x7f, 0xf6, 0x61, 0x19, 0xde, 0x45, 0xe9, 0x9a, 0x53, + 0x06, 0xad, 0x26, 0x0b, 0x8d, 0x9a, 0x3b, 0xb2, 0x00, 0x32, 0xe1, 0xe4, 0x76, 0xb7, 0x4f, 0x05, + 0xaa, 0xbd, 0x0a, 0x5c, 0x09, 0xd9, 0x3c, 0x3f, 0xb3, 0x80, 0xdc, 0x72, 0x6c, 0x72, 0xad, 0x66, + 0xee, 0x5e, 0x9f, 0x25, 0xae, 0xad, 0xae, 0x86, 0xed, 0x6c, 0x25, 0x5e, 0x43, 0xe3, 0x0c, 0x64, + 0x4e, 0x4d, 0x32, 0xe9, 0x78, 0x2c, 0xb6, 0x9a, 0xb9, 0x85, 0xae, 0x87, 0x2b, 0x7d, 0xf0, 0xd0, + 0x56, 0xb7, 0x03, 0xf1, 0x2e, 0xc2, 0x16, 0xa3, 0x2a, 0x93, 0x0d, 0xdb, 0xa1, 0x53, 0x15, 0xe4, + 0xc8, 0x17, 0x5a, 0xcd, 0x5c, 0x3a, 0x28, 0xb7, 0x37, 0x37, 0x42, 0xe0, 0xee, 0x6c, 0xc9, 0x4b, + 0x08, 0xca, 0x0a, 0x49, 0x25, 0xaa, 0x54, 0x58, 0x86, 0xd7, 0xd0, 0xad, 0xba, 0x6c, 0x68, 0xfa, + 0x59, 0x59, 0x21, 0x53, 0x71, 0x69, 0x78, 0x61, 0xb8, 0x84, 0xd2, 0xbc, 0x61, 0x59, 0x94, 0x09, + 0x50, 0x4a, 0x7a, 0x83, 0x0b, 0x60, 0x9c, 0x4c, 0xc7, 0x69, 0x7b, 0xe3, 0xf1, 0x53, 0xb4, 0xd0, + 0xf9, 0x6e, 0x8a, 0x7d, 0x5d, 0x16, 0x75, 0xca, 0x8c, 0x43, 0x0e, 0x0a, 0x99, 0x89, 0x33, 0x8a, + 0xd6, 0xe0, 0xaf, 0x11, 0xe9, 0xf9, 0x47, 0x67, 0x6f, 0x67, 0xe3, 0xfc, 0xfa, 0xca, 0xf0, 0x3a, + 0x9a, 0xdc, 0x3b, 0xe8, 0x78, 0xdc, 0x8e, 0xf3, 0xe8, 0xc6, 0xd9, 0xd5, 0x0d, 0x9c, 0x92, 0x2a, + 0x6d, 0x08, 0x20, 0xe9, 0xd8, 0xea, 0xf6, 0x0a, 0xf0, 0x27, 0x28, 0xe5, 0xab, 0x39, 0xc1, 0x71, + 0x7a, 0x7f, 0xa4, 0x9d, 0xb4, 0x60, 0xb2, 0xc9, 0xed, 0xbd, 0x26, 0x73, 0xb1, 0x49, 0x7b, 0x71, + 0xce, 0x6c, 0x32, 0x03, 0x53, 0x94, 0x8e, 0x35, 0x5d, 0x21, 0xf3, 0xf1, 0xb3, 0x75, 0x23, 0x31, + 0xa0, 0x7c, 0x20, 0xf9, 0xb2, 0xb2, 0x57, 0x3f, 0x68, 0x83, 0xa0, 0x44, 0x0d, 0x8b, 0x9a, 0x60, + 0x0a, 0xb2, 0x10, 0xe7, 0x97, 0xc0, 0x00, 0xef, 0xa0, 0x69, 0x5e, 0x3b, 0x06, 0x43, 0xee, 0x54, + 0xe3, 0x4e, 0x22, 0x86, 0x04, 0x45, 0xf9, 0x45, 0x74, 0x37, 0xc4, 0xf2, 0x2a, 0x70, 0x8b, 0x9a, + 0x1c, 0xf2, 0xaf, 0xc6, 0xd0, 0x4c, 0x85, 0xab, 0x55, 0x38, 0xa1, 0xdf, 0xc3, 
0x10, 0xf3, 0xef, + 0x10, 0xf3, 0x9f, 0xa3, 0x19, 0x06, 0x27, 0x34, 0x39, 0xe3, 0x43, 0xe1, 0xef, 0x01, 0xe0, 0x7b, + 0x4e, 0x2b, 0x1a, 0xe4, 0xb4, 0x12, 0x74, 0x27, 0x78, 0x22, 0xbd, 0xc3, 0xfa, 0x3a, 0x85, 0xd2, + 0x15, 0xae, 0xee, 0x33, 0x7a, 0xa2, 0xd9, 0xa1, 0xc3, 0xf3, 0xfa, 0x0e, 0xcf, 0xeb, 0x16, 0x9a, + 0xb5, 0x3a, 0x85, 0x90, 0xf5, 0x24, 0x07, 0x36, 0x1c, 0x3f, 0xbc, 0x92, 0x0c, 0xaf, 0x24, 0xc3, + 0x2b, 0xc9, 0x87, 0x79, 0x25, 0xb9, 0x87, 0x16, 0x7b, 0x48, 0xee, 0x71, 0xfe, 0xdf, 0x94, 0x73, + 0x61, 0x39, 0xb4, 0x6c, 0xaf, 0x92, 0xf7, 0x2b, 0xb6, 0x6c, 0xd6, 0x29, 0x2e, 0xa3, 0x89, 0x1a, + 0x03, 0x59, 0xd0, 0x81, 0x71, 0xdf, 0xd1, 0xbf, 0x67, 0xbc, 0xff, 0x2a, 0x0a, 0x86, 0xc9, 0x58, + 0x1f, 0xc1, 0xc2, 0x72, 0x34, 0xe7, 0x7b, 0xc0, 0x4a, 0x0d, 0x4d, 0x80, 0x61, 0x89, 0x33, 0x3f, + 0x92, 0x82, 0xa0, 0xc7, 0xe8, 0xa6, 0xe2, 0xd1, 0xbd, 0xea, 0x7c, 0xc6, 0x8f, 0x3d, 0xd4, 0xba, + 0xb8, 0xfe, 0xb8, 0xd5, 0xcc, 0xe5, 0xfa, 0xb9, 0x86, 0xa1, 0x5b, 0x40, 0x63, 0xf4, 0xa5, 0xdd, + 0xaf, 0x5d, 0x4c, 0x93, 0xbe, 0x95, 0x72, 0xc3, 0xdc, 0xb5, 0x04, 0xe9, 0x8a, 0x12, 0xaf, 0x25, + 0x88, 0xd7, 0xbd, 0x48, 0x22, 0xa4, 0x92, 0xb9, 0x45, 0xb1, 0x61, 0x2b, 0xc8, 0x86, 0xa9, 0x64, + 0x4e, 0x01, 0x4a, 0xec, 0x45, 0xf6, 0xa0, 0xe9, 0x84, 0x39, 0x45, 0x74, 0xa3, 0xc3, 0x78, 0x96, + 0x5f, 0xea, 0xd9, 0x87, 0xea, 0xdf, 0x5e, 0x4a, 0xf5, 0x4b, 0x9d, 0xfb, 0xf3, 0x7d, 0xd3, 0x8f, + 0xca, 0xdb, 0xc9, 0xdc, 0x7c, 0xd0, 0x7c, 0xec, 0x6b, 0x9b, 0xe9, 0x64, 0xea, 0x6e, 0x03, 0xad, + 0x44, 0x35, 0x50, 0x9c, 0xcc, 0x25, 0xa2, 0x95, 0x6e, 0xfa, 0x5b, 0xd5, 0x5c, 0xc2, 0xa5, 0x74, + 0x9b, 0xd6, 0x56, 0x14, 0xff, 0x13, 0x9c, 0x28, 0x5f, 0x27, 0xa0, 0xff, 0xa3, 0x13, 0x5c, 0xea, + 0xfc, 0xf6, 0x7a, 0xc2, 0x47, 0x28, 0xd7, 0x87, 0xfa, 0x5e, 0x67, 0xf8, 0xe3, 0x86, 0xd3, 0x19, + 0x76, 0x40, 0x87, 0x0f, 0xa7, 0x33, 0x54, 0xfa, 0x77, 0x86, 0x01, 0xae, 0xc9, 0xed, 0x6d, 0x8e, + 0xda, 0xc2, 0xce, 0x36, 0x3f, 0xf8, 0x69, 0x02, 0x8d, 0x56, 0xb8, 0x8a, 0x7f, 0x91, 0xd0, 0x54, + 0xe0, 0x11, 0xf0, 
0x17, 0x85, 0x81, 0x9e, 0x3f, 0x17, 0x42, 0x8f, 0x1f, 0x32, 0xcf, 0xae, 0xc7, + 0xa7, 0x93, 0x30, 0xfe, 0x59, 0x42, 0x29, 0xff, 0x33, 0x8c, 0xdd, 0xc1, 0xfd, 0x7d, 0x36, 0x99, + 0xca, 0xb5, 0xd8, 0x78, 0x59, 0xfe, 0x2a, 0xa1, 0x99, 0xd0, 0x8f, 0xd7, 0x27, 0x83, 0xcf, 0x10, + 0x74, 0xca, 0xec, 0x5f, 0x97, 0x93, 0x97, 0xee, 0xef, 0x12, 0x9a, 0x8f, 0xbc, 0x83, 0x5d, 0xa1, + 0x7a, 0x51, 0x7e, 0x99, 0x17, 0xd7, 0xeb, 0x17, 0x58, 0x40, 0x24, 0x2a, 0xae, 0xb0, 0x80, 0x28, + 0xbf, 0xab, 0x2c, 0x20, 0xee, 0x7b, 0xb8, 0x0d, 0xaf, 0xcf, 0xb3, 0xd2, 0x9b, 0xf3, 0xac, 0xf4, + 0xcf, 0x79, 0x56, 0x7a, 0x75, 0x91, 0x1d, 0x79, 0x73, 0x91, 0x1d, 0xf9, 0xfb, 0x22, 0x3b, 0xf2, + 0xcd, 0x53, 0x55, 0x13, 0xc7, 0x8d, 0x23, 0xdb, 0xac, 0xe8, 0xce, 0xbd, 0x12, 0xf5, 0x4e, 0x69, + 0xa5, 0x3b, 0xd9, 0x4a, 0xfb, 0xad, 0xd2, 0x69, 0xe0, 0xbd, 0xd2, 0x99, 0x05, 0xfc, 0x68, 0xdc, + 0x79, 0xe7, 0xb3, 0xfe, 0x5f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xe0, 0xa5, 0xc1, 0x1f, 0x9d, 0x1a, + 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -1349,7 +1352,7 @@ var _Msg_serviceDesc = grpc.ServiceDesc{ }, }, Streams: []grpc.StreamDesc{}, - Metadata: "compliance/tx.proto", + Metadata: "zigbeealliance/distributedcomplianceledger/compliance/tx.proto", } func (m *MsgCertifyModel) Marshal() (dAtA []byte, err error) { diff --git a/types/compliance/types.go b/x/compliance/types/types.go similarity index 100% rename from types/compliance/types.go rename to x/compliance/types/types.go diff --git a/x/dclauth/ante/ante.go b/x/dclauth/ante/ante.go index d175c1964..0b8263f5b 100644 --- a/x/dclauth/ante/ante.go +++ b/x/dclauth/ante/ante.go @@ -1,6 +1,7 @@ package ante import ( + "cosmossdk.io/errors" "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519" "github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1" "github.com/cosmos/cosmos-sdk/crypto/keys/secp256r1" @@ -24,7 +25,7 @@ type HandlerOptions struct { // signer. 
func NewAnteHandler(options HandlerOptions) (sdk.AnteHandler, error) { if options.AccountKeeper == nil { - return nil, sdkerrors.Wrap(sdkerrors.ErrLogic, "account keeper is required for ante builder") + return nil, errors.Wrap(sdkerrors.ErrLogic, "account keeper is required for ante builder") } // sigGasConsumer := options.SigGasConsumer @@ -37,7 +38,6 @@ func NewAnteHandler(options HandlerOptions) (sdk.AnteHandler, error) { NewInfiniteGasSetUpContextDecorator(), // outermost AnteDecorator. SetUpContext must be called first // authante.NewSetUpContextDecorator(), // outermost AnteDecorator. SetUpContext must be called first - authante.NewRejectExtensionOptionsDecorator(), authante.NewValidateBasicDecorator(), authante.NewTxTimeoutHeightDecorator(), authante.NewValidateMemoDecorator(options.AccountKeeper), @@ -69,7 +69,7 @@ func DefaultSigVerificationGasConsumer( case *ed25519.PubKey: meter.ConsumeGas(params.SigVerifyCostED25519, "ante verify: ed25519") - return sdkerrors.Wrap(sdkerrors.ErrInvalidPubKey, "ED25519 public keys are unsupported") + return errors.Wrap(sdkerrors.ErrInvalidPubKey, "ED25519 public keys are unsupported") case *secp256k1.PubKey: meter.ConsumeGas(params.SigVerifyCostSecp256k1, "ante verify: secp256k1") @@ -95,6 +95,6 @@ func DefaultSigVerificationGasConsumer( */ default: - return sdkerrors.Wrapf(sdkerrors.ErrInvalidPubKey, "unrecognized public key type: %T", pubkey) + return errors.Wrapf(sdkerrors.ErrInvalidPubKey, "unrecognized public key type: %T", pubkey) } } diff --git a/x/dclauth/base/keeper/account.go b/x/dclauth/base/keeper/account.go index d42e2119d..be1b28e05 100644 --- a/x/dclauth/base/keeper/account.go +++ b/x/dclauth/base/keeper/account.go @@ -65,6 +65,6 @@ func (k Keeper) IterateAccounts(ctx sdk.Context, cb func(account types.Account) } // just a stub to have AccountKeeper.GetModuleAddress API filled. 
-func (k Keeper) GetModuleAddress(moduleName string) sdk.AccAddress { +func (k Keeper) GetModuleAddress(_ string) sdk.AccAddress { return nil } diff --git a/x/dclauth/base/keeper/grpc_query_account.go b/x/dclauth/base/keeper/grpc_query_account.go index a25facebe..595671ce6 100644 --- a/x/dclauth/base/keeper/grpc_query_account.go +++ b/x/dclauth/base/keeper/grpc_query_account.go @@ -8,9 +8,10 @@ import ( sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/types/query" "github.com/cosmos/cosmos-sdk/x/auth/types" - authtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" + + authtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" ) func (k Keeper) Accounts(c context.Context, req *types.QueryAccountsRequest) (*types.QueryAccountsResponse, error) { @@ -75,3 +76,31 @@ func (k Keeper) Params(c context.Context, req *types.QueryParamsRequest) (*types return &types.QueryParamsResponse{Params: params}, nil } + +func (k Keeper) AccountAddressByID(_ context.Context, _ *types.QueryAccountAddressByIDRequest) (resp *types.QueryAccountAddressByIDResponse, e error) { + return resp, nil +} + +func (k Keeper) ModuleAccounts(_ context.Context, _ *types.QueryModuleAccountsRequest) (resp *types.QueryModuleAccountsResponse, e error) { + return resp, nil +} + +func (k Keeper) ModuleAccountByName(_ context.Context, _ *types.QueryModuleAccountByNameRequest) (resp *types.QueryModuleAccountByNameResponse, e error) { + return resp, nil +} + +func (k Keeper) Bech32Prefix(_ context.Context, _ *types.Bech32PrefixRequest) (resp *types.Bech32PrefixResponse, e error) { + return resp, nil +} + +func (k Keeper) AddressBytesToString(_ context.Context, _ *types.AddressBytesToStringRequest) (resp *types.AddressBytesToStringResponse, e error) { + return resp, nil +} + +func (k Keeper) AddressStringToBytes(_ context.Context, _ *types.AddressStringToBytesRequest) (resp 
*types.AddressStringToBytesResponse, e error) { + return resp, nil +} + +func (k Keeper) AccountInfo(_ context.Context, _ *types.QueryAccountInfoRequest) (resp *types.QueryAccountInfoResponse, e error) { + return resp, nil +} diff --git a/x/dclauth/base/keeper/keeper.go b/x/dclauth/base/keeper/keeper.go index 73c658d7b..524f5e4dc 100644 --- a/x/dclauth/base/keeper/keeper.go +++ b/x/dclauth/base/keeper/keeper.go @@ -3,18 +3,19 @@ package keeper import ( "fmt" + "github.com/cometbft/cometbft/libs/log" "github.com/cosmos/cosmos-sdk/codec" + storetypes "github.com/cosmos/cosmos-sdk/store/types" sdk "github.com/cosmos/cosmos-sdk/types" basetypes "github.com/cosmos/cosmos-sdk/x/auth/types" paramtypes "github.com/cosmos/cosmos-sdk/x/params/types" - "github.com/tendermint/tendermint/libs/log" ) type ( Keeper struct { cdc codec.BinaryCodec - storeKey sdk.StoreKey - memKey sdk.StoreKey + storeKey storetypes.StoreKey + memKey storetypes.StoreKey paramSubspace paramtypes.Subspace } @@ -23,7 +24,7 @@ type ( func NewKeeper( cdc codec.BinaryCodec, storeKey, - memKey sdk.StoreKey, + memKey storetypes.StoreKey, ) *Keeper { return &Keeper{ cdc: cdc, diff --git a/x/dclauth/client/cli/query.go b/x/dclauth/client/cli/query.go index f4a20d849..fb10bf8b2 100644 --- a/x/dclauth/client/cli/query.go +++ b/x/dclauth/client/cli/query.go @@ -13,7 +13,7 @@ import ( ) // GetQueryCmd returns the cli query commands for this module. 
-func GetQueryCmd(queryRoute string) *cobra.Command { +func GetQueryCmd(_ string) *cobra.Command { // Group dclauth queries under a subcommand cmd := &cobra.Command{ Use: types.CmdName, diff --git a/x/dclauth/client/cli/query_account.go b/x/dclauth/client/cli/query_account.go index 208cbe15c..fecbcd40e 100644 --- a/x/dclauth/client/cli/query_account.go +++ b/x/dclauth/client/cli/query_account.go @@ -17,7 +17,10 @@ func CmdListAccount() *cobra.Command { Use: "all-accounts", Short: "list all Account", RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } pageReq, err := client.ReadPageRequest(cmd.Flags()) if err != nil { @@ -54,7 +57,10 @@ func CmdShowAccount() *cobra.Command { Short: "shows a Account", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } argAddress, err := sdk.AccAddressFromBech32(viper.GetString(FlagAddress)) if err != nil { diff --git a/x/dclauth/client/cli/query_account_test.go b/x/dclauth/client/cli/query_account_test.go index 69cb95916..21f88883f 100644 --- a/x/dclauth/client/cli/query_account_test.go +++ b/x/dclauth/client/cli/query_account_test.go @@ -9,7 +9,7 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/nullify" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/client/cli" diff --git a/x/dclauth/client/cli/query_pending_account.go 
b/x/dclauth/client/cli/query_pending_account.go index b63205bed..56a56f1aa 100644 --- a/x/dclauth/client/cli/query_pending_account.go +++ b/x/dclauth/client/cli/query_pending_account.go @@ -17,7 +17,10 @@ func CmdListPendingAccount() *cobra.Command { Use: "all-proposed-accounts", Short: "list all PendingAccount", RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } pageReq, err := client.ReadPageRequest(cmd.Flags()) if err != nil { @@ -54,7 +57,10 @@ func CmdShowPendingAccount() *cobra.Command { Short: "shows a PendingAccount", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } argAddress, err := sdk.AccAddressFromBech32(viper.GetString(FlagAddress)) if err != nil { diff --git a/x/dclauth/client/cli/query_pending_account_revocation.go b/x/dclauth/client/cli/query_pending_account_revocation.go index 15cc33289..9582f6b34 100644 --- a/x/dclauth/client/cli/query_pending_account_revocation.go +++ b/x/dclauth/client/cli/query_pending_account_revocation.go @@ -17,7 +17,10 @@ func CmdListPendingAccountRevocation() *cobra.Command { Use: "all-proposed-accounts-to-revoke", Short: "list all PendingAccountRevocation", RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } pageReq, err := client.ReadPageRequest(cmd.Flags()) if err != nil { @@ -54,7 +57,10 @@ func CmdShowPendingAccountRevocation() *cobra.Command { Short: "shows a PendingAccountRevocation", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := 
client.GetClientQueryContext(cmd) + if err != nil { + return err + } argAddress, err := sdk.AccAddressFromBech32(viper.GetString(FlagAddress)) if err != nil { diff --git a/x/dclauth/client/cli/query_pending_account_revocation_test.go b/x/dclauth/client/cli/query_pending_account_revocation_test.go index 21ec3431c..a88bdf818 100644 --- a/x/dclauth/client/cli/query_pending_account_revocation_test.go +++ b/x/dclauth/client/cli/query_pending_account_revocation_test.go @@ -9,7 +9,7 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/nullify" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/client/cli" diff --git a/x/dclauth/client/cli/query_pending_account_test.go b/x/dclauth/client/cli/query_pending_account_test.go index 83c0136bb..54c883555 100644 --- a/x/dclauth/client/cli/query_pending_account_test.go +++ b/x/dclauth/client/cli/query_pending_account_test.go @@ -9,7 +9,7 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/nullify" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/client/cli" diff --git a/x/dclauth/client/cli/query_rejected_account.go b/x/dclauth/client/cli/query_rejected_account.go index b9de8d466..0aa2004ff 100644 --- a/x/dclauth/client/cli/query_rejected_account.go +++ b/x/dclauth/client/cli/query_rejected_account.go @@ -17,7 +17,10 @@ func 
CmdListRejectedAccount() *cobra.Command { Use: "all-rejected-accounts", Short: "list all RejectedAccount", RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } pageReq, err := client.ReadPageRequest(cmd.Flags()) if err != nil { @@ -51,7 +54,10 @@ func CmdShowRejectedAccount() *cobra.Command { Short: "shows a RejectedAccount", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } argAddress, err := sdk.AccAddressFromBech32(viper.GetString(FlagAddress)) if err != nil { diff --git a/x/dclauth/client/cli/query_rejected_account_test.go b/x/dclauth/client/cli/query_rejected_account_test.go index c9b191728..70a3766c1 100644 --- a/x/dclauth/client/cli/query_rejected_account_test.go +++ b/x/dclauth/client/cli/query_rejected_account_test.go @@ -9,7 +9,7 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" diff --git a/x/dclauth/client/cli/query_revoked_account.go b/x/dclauth/client/cli/query_revoked_account.go index 9733e43c1..e0b522430 100644 --- a/x/dclauth/client/cli/query_revoked_account.go +++ b/x/dclauth/client/cli/query_revoked_account.go @@ -17,7 +17,10 @@ func CmdListRevokedAccount() *cobra.Command { Use: "all-revoked-accounts", Short: "list all RevokedAccount", RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } pageReq, err := client.ReadPageRequest(cmd.Flags()) 
if err != nil { @@ -51,7 +54,10 @@ func CmdShowRevokedAccount() *cobra.Command { Short: "shows a RevokedAccount", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } argAddress, err := sdk.AccAddressFromBech32(viper.GetString(FlagAddress)) if err != nil { diff --git a/x/dclauth/client/cli/query_revoked_account_test.go b/x/dclauth/client/cli/query_revoked_account_test.go index a63a37219..dfce810f6 100644 --- a/x/dclauth/client/cli/query_revoked_account_test.go +++ b/x/dclauth/client/cli/query_revoked_account_test.go @@ -8,7 +8,7 @@ package cli_test "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" diff --git a/x/dclauth/handler.go b/x/dclauth/handler.go index 6fada94f2..49659eb3f 100644 --- a/x/dclauth/handler.go +++ b/x/dclauth/handler.go @@ -3,8 +3,10 @@ package dclauth import ( "fmt" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" ) @@ -41,7 +43,7 @@ func NewHandler(k keeper.Keeper) sdk.Handler { default: errMsg := fmt.Sprintf("unrecognized %s message type: %T", types.ModuleName, msg) - return nil, sdkerrors.Wrap(sdkerrors.ErrUnknownRequest, errMsg) + return nil, errors.Wrap(sdkerrors.ErrUnknownRequest, errMsg) } } } diff --git a/x/dclauth/keeper/account.go b/x/dclauth/keeper/account.go index 0c040764e..a34bfe44e 100644 --- a/x/dclauth/keeper/account.go +++ b/x/dclauth/keeper/account.go @@ -174,11 +174,11 @@ func (k Keeper) 
CountAccountsWithRole(ctx sdk.Context, roleToCount types.Account } // just a stub to have AccountKeeper.GetModuleAddress API filled. -func (k Keeper) GetModuleAddress(moduleName string) sdk.AccAddress { +func (k Keeper) GetModuleAddress(_ string) sdk.AccAddress { return nil } -func (k Keeper) GetParams(ctx sdk.Context) (params authtypes.Params) { +func (k Keeper) GetParams(_ sdk.Context) (params authtypes.Params) { return authtypes.Params{ MaxMemoCharacters: types.DclMaxMemoCharacters, TxSigLimit: types.DclTxSigLimit, diff --git a/x/dclauth/keeper/account_stat_test.go b/x/dclauth/keeper/account_stat_test.go index 89b02326f..5e81f55e5 100644 --- a/x/dclauth/keeper/account_stat_test.go +++ b/x/dclauth/keeper/account_stat_test.go @@ -11,7 +11,7 @@ import ( "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" ) -func createTestAccountStat(keeper *keeper.Keeper, ctx sdk.Context) types.AccountStat { +func createTestAccountStat(keeper *keeper.Keeper, ctx sdk.Context) types.AccountStat { //nolint:unparam item := types.AccountStat{} keeper.SetAccountStat(ctx, item) diff --git a/x/dclauth/keeper/keeper.go b/x/dclauth/keeper/keeper.go index 2c40cab17..f375d92ff 100644 --- a/x/dclauth/keeper/keeper.go +++ b/x/dclauth/keeper/keeper.go @@ -4,24 +4,26 @@ import ( "fmt" "math" + "github.com/cometbft/cometbft/libs/log" "github.com/cosmos/cosmos-sdk/codec" + storetypes "github.com/cosmos/cosmos-sdk/store/types" sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/tendermint/tendermint/libs/log" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" ) type ( Keeper struct { cdc codec.BinaryCodec - storeKey sdk.StoreKey - memKey sdk.StoreKey + storeKey storetypes.StoreKey + memKey storetypes.StoreKey } ) func NewKeeper( cdc codec.BinaryCodec, storeKey, - memKey sdk.StoreKey, + memKey storetypes.StoreKey, ) *Keeper { return &Keeper{ cdc: cdc, @@ -38,11 +40,11 @@ func (k Keeper) 
Logger(ctx sdk.Context) log.Logger { // // for tests needs: to link dependent keepers, // need to explore the alternatives -func (k Keeper) StoreKey() sdk.StoreKey { +func (k Keeper) StoreKey() storetypes.StoreKey { return k.storeKey } -func (k Keeper) MemKey() sdk.StoreKey { +func (k Keeper) MemKey() storetypes.StoreKey { return k.memKey } @@ -58,3 +60,13 @@ func (k Keeper) AccountApprovalsCount(ctx sdk.Context, percent float64) int { func (k Keeper) AccountRejectApprovalsCount(ctx sdk.Context, percent float64) int { return k.CountAccountsWithRole(ctx, types.Trustee) - k.AccountApprovalsCount(ctx, percent) + 1 } + +func (k Keeper) UnmarshalAccount(bytes []byte) (authtypes.AccountI, error) { + var acc authtypes.AccountI + + return acc, k.cdc.UnmarshalInterface(bytes, &acc) +} + +func (k Keeper) GetCodec() codec.BinaryCodec { + return k.cdc +} diff --git a/x/dclauth/keeper/msg_server_approve_add_account.go b/x/dclauth/keeper/msg_server_approve_add_account.go index 01812e58f..2e815a2ac 100644 --- a/x/dclauth/keeper/msg_server_approve_add_account.go +++ b/x/dclauth/keeper/msg_server_approve_add_account.go @@ -3,6 +3,7 @@ package keeper import ( "context" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" @@ -16,12 +17,12 @@ func (k msgServer) ApproveAddAccount( signerAddr, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Signer: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Signer: (%s)", err) } // check if sender has enough rights to create a validator node if !k.HasRole(ctx, signerAddr, types.Trustee) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "MsgApproveAddAccount transaction should be signed by an account with the %s role", types.Trustee, ) @@ 
-29,7 +30,7 @@ func (k msgServer) ApproveAddAccount( accAddr, err := sdk.AccAddressFromBech32(msg.Address) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } // check if pending account exists @@ -42,7 +43,7 @@ func (k msgServer) ApproveAddAccount( // check if pending account already has approval from signer if pendAcc.HasApprovalFrom(signerAddr) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "Pending account associated with the address=%v already has approval from=%v", msg.Address, msg.Signer, diff --git a/x/dclauth/keeper/msg_server_approve_revoke_account.go b/x/dclauth/keeper/msg_server_approve_revoke_account.go index e791205fb..0e5755ad7 100644 --- a/x/dclauth/keeper/msg_server_approve_revoke_account.go +++ b/x/dclauth/keeper/msg_server_approve_revoke_account.go @@ -3,6 +3,7 @@ package keeper import ( "context" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" @@ -14,12 +15,12 @@ func (k msgServer) ApproveRevokeAccount(goCtx context.Context, msg *types.MsgApp signerAddr, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Signer: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Signer: (%s)", err) } // check that sender has enough rights to approve account revocation if !k.HasRole(ctx, signerAddr, types.Trustee) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "MsgApproveRevokeAccount transaction should be signed by an account with the %s role", types.Trustee, ) @@ -27,7 +28,7 @@ func (k msgServer) ApproveRevokeAccount(goCtx context.Context, msg *types.MsgApp accAddr, err := sdk.AccAddressFromBech32(msg.Address) if err != 
nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } // check that pending account revocation exists @@ -40,7 +41,7 @@ func (k msgServer) ApproveRevokeAccount(goCtx context.Context, msg *types.MsgApp // check if pending account revocation already has approval from signer if revoc.HasRevocationFrom(signerAddr) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "Pending account revocation associated with the address=%v already has approval from=%v", msg.Address, msg.Signer, @@ -58,7 +59,7 @@ func (k msgServer) ApproveRevokeAccount(goCtx context.Context, msg *types.MsgApp if len(revoc.Approvals) >= k.AccountApprovalsCount(ctx, types.AccountApprovalsPercent) { // Move account to entity revoked account revokedAccount, err := k.AddAccountToRevokedAccount( - ctx, accAddr, revoc.Approvals, types.RevokedAccount_TrusteeVoting) + ctx, accAddr, revoc.Approvals, types.RevokedAccount_TrusteeVoting) //nolint:nosnakecase if err != nil { return nil, err } diff --git a/x/dclauth/keeper/msg_server_propose_add_account.go b/x/dclauth/keeper/msg_server_propose_add_account.go index b54a19a18..e94de1314 100644 --- a/x/dclauth/keeper/msg_server_propose_add_account.go +++ b/x/dclauth/keeper/msg_server_propose_add_account.go @@ -3,6 +3,7 @@ package keeper import ( "context" + "cosmossdk.io/errors" cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" @@ -16,12 +17,12 @@ func (k msgServer) ProposeAddAccount(goCtx context.Context, msg *types.MsgPropos signerAddr, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Signer: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Signer: (%s)", err) } // check if 
sender has enough rights to create a validator node if !k.HasRole(ctx, signerAddr, types.Trustee) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "MsgProposeAddAccount transaction should be signed by an account with the %s role", types.Trustee, ) @@ -34,7 +35,7 @@ func (k msgServer) ProposeAddAccount(goCtx context.Context, msg *types.MsgPropos accAddr, err := sdk.AccAddressFromBech32(msg.Address) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } // check if active account already exists. @@ -50,7 +51,7 @@ func (k msgServer) ProposeAddAccount(goCtx context.Context, msg *types.MsgPropos // parse the key. pk, ok := msg.PubKey.GetCachedValue().(cryptotypes.PubKey) if !ok { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidType, "Expecting cryptotypes.PubKey, got %T", pk) + return nil, errors.Wrapf(sdkerrors.ErrInvalidType, "Expecting cryptotypes.PubKey, got %T", pk) } ba := authtypes.NewBaseAccount(accAddr, pk, 0, 0) diff --git a/x/dclauth/keeper/msg_server_propose_revoke_account.go b/x/dclauth/keeper/msg_server_propose_revoke_account.go index b939735a2..34c0802a0 100644 --- a/x/dclauth/keeper/msg_server_propose_revoke_account.go +++ b/x/dclauth/keeper/msg_server_propose_revoke_account.go @@ -3,6 +3,7 @@ package keeper import ( "context" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" @@ -13,12 +14,12 @@ func (k msgServer) ProposeRevokeAccount(goCtx context.Context, msg *types.MsgPro signerAddr, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Signer: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Signer: (%s)", 
err) } // check that sender has enough rights to propose account revocation if !k.HasRole(ctx, signerAddr, types.Trustee) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "MsgProposeRevokeAccount transaction should be signed by an account with the %s role", types.Trustee, ) @@ -26,7 +27,7 @@ func (k msgServer) ProposeRevokeAccount(goCtx context.Context, msg *types.MsgPro accAddr, err := sdk.AccAddressFromBech32(msg.Address) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } // check that account exists @@ -54,7 +55,7 @@ func (k msgServer) ProposeRevokeAccount(goCtx context.Context, msg *types.MsgPro // Move account to entity revoked account revokedAccount, err := k.AddAccountToRevokedAccount( - ctx, accAddr, revokedApproval, types.RevokedAccount_TrusteeVoting) + ctx, accAddr, revokedApproval, types.RevokedAccount_TrusteeVoting) //nolint:nosnakecase if err != nil { return nil, err } diff --git a/x/dclauth/keeper/msg_server_reject_add_account.go b/x/dclauth/keeper/msg_server_reject_add_account.go index 94ecb840e..8315575b2 100644 --- a/x/dclauth/keeper/msg_server_reject_add_account.go +++ b/x/dclauth/keeper/msg_server_reject_add_account.go @@ -3,6 +3,7 @@ package keeper import ( "context" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" @@ -15,12 +16,12 @@ func (k msgServer) RejectAddAccount( signerAddr, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Signer: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Signer: (%s)", err) } // check if sendor has enough rights to reject a validator node if !k.HasRole(ctx, 
signerAddr, types.Trustee) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "MsgRejectAccount transaction should be signed by an account with the %s role", types.Trustee, ) @@ -28,7 +29,7 @@ func (k msgServer) RejectAddAccount( accAddr, err := sdk.AccAddressFromBech32(msg.Address) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } // check if pending account exists @@ -41,7 +42,7 @@ func (k msgServer) RejectAddAccount( // check if pending account already has reject approval from signer if pendAcc.HasRejectApprovalFrom(signerAddr) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "Pending account associated with the address=%v already has reject from=%v", msg.Address, msg.Signer, diff --git a/x/dclauth/keeper/msg_server_test.go b/x/dclauth/keeper/msg_server_test.go index 85f7375f8..9cfe72671 100644 --- a/x/dclauth/keeper/msg_server_test.go +++ b/x/dclauth/keeper/msg_server_test.go @@ -10,7 +10,7 @@ import ( "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" ) -//nolint:deadcode,unused +//nolint:unused func setupMsgServer(tb testing.TB) (types.MsgServer, context.Context) { tb.Helper() k, ctx := keepertest.DclauthKeeper(tb) diff --git a/x/dclauth/keeper/revoked_account.go b/x/dclauth/keeper/revoked_account.go index 291e6947c..01045196c 100644 --- a/x/dclauth/keeper/revoked_account.go +++ b/x/dclauth/keeper/revoked_account.go @@ -64,7 +64,7 @@ func (k Keeper) GetAllRevokedAccount(ctx sdk.Context) (list []types.RevokedAccou func (k Keeper) AddAccountToRevokedAccount( ctx sdk.Context, accAddr sdk.AccAddress, approvals []*types.Grant, - reason types.RevokedAccount_Reason, + reason types.RevokedAccount_Reason, //nolint:nosnakecase ) (*types.RevokedAccount, error) { // get account 
account, ok := k.GetAccountO(ctx, accAddr) diff --git a/x/dclauth/module.go b/x/dclauth/module.go index b3ef2f1fd..b95955d09 100644 --- a/x/dclauth/module.go +++ b/x/dclauth/module.go @@ -5,16 +5,20 @@ import ( "encoding/json" "fmt" + "github.com/gorilla/mux" + "github.com/grpc-ecosystem/grpc-gateway/runtime" + "github.com/spf13/cobra" + + abci "github.com/cometbft/cometbft/abci/types" "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/codec" cdctypes "github.com/cosmos/cosmos-sdk/codec/types" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/types/module" + simtypes "github.com/cosmos/cosmos-sdk/types/simulation" + "github.com/cosmos/cosmos-sdk/x/auth/simulation" basetypes "github.com/cosmos/cosmos-sdk/x/auth/types" - "github.com/gorilla/mux" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "github.com/spf13/cobra" - abci "github.com/tendermint/tendermint/abci/types" + basekeeper "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/base/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/client/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/keeper" @@ -63,7 +67,7 @@ func (AppModuleBasic) DefaultGenesis(cdc codec.JSONCodec) json.RawMessage { } // ValidateGenesis performs genesis state validation for the dclauth module. -func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, config client.TxEncodingConfig, bz json.RawMessage) error { +func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, _ client.TxEncodingConfig, bz json.RawMessage) error { var genState types.GenesisState if err := cdc.UnmarshalJSON(bz, &genState); err != nil { return fmt.Errorf("failed to unmarshal %s genesis state: %w", types.ModuleName, err) @@ -73,7 +77,7 @@ func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, config client.TxEncod } // RegisterRESTRoutes registers the dclauth module's REST service handlers. 
-func (AppModuleBasic) RegisterRESTRoutes(clientCtx client.Context, rtr *mux.Router) { +func (AppModuleBasic) RegisterRESTRoutes(_ client.Context, _ *mux.Router) { } // RegisterGRPCGatewayRoutes registers the gRPC Gateway routes for the module. @@ -117,16 +121,15 @@ func (am AppModule) Name() string { return am.AppModuleBasic.Name() } -// Route returns the dclauth module's message routing key. -func (am AppModule) Route() sdk.Route { - return sdk.NewRoute(types.RouterKey, NewHandler(am.keeper)) +func (am AppModule) GenerateGenesisState(simState *module.SimulationState) { + simulation.RandomizedGenState(simState, simulation.RandomGenesisAccounts) } -// QuerierRoute returns the dclauth module's query routing key. -func (AppModule) QuerierRoute() string { return types.QuerierRoute } +func (am AppModule) RegisterStoreDecoder(sdr sdk.StoreDecoderRegistry) { + sdr[types.StoreKey] = simulation.NewDecodeStore(am.keeper) +} -// LegacyQuerierHandler returns the dclauth module's Querier. -func (am AppModule) LegacyQuerierHandler(legacyQuerierCdc *codec.LegacyAmino) sdk.Querier { +func (am AppModule) WeightedOperations(_ module.SimulationState) []simtypes.WeightedOperation { return nil } @@ -135,7 +138,6 @@ func (am AppModule) LegacyQuerierHandler(legacyQuerierCdc *codec.LegacyAmino) sd func (am AppModule) RegisterServices(cfg module.Configurator) { types.RegisterMsgServer(cfg.MsgServer(), keeper.NewMsgServerImpl(am.keeper)) types.RegisterQueryServer(cfg.QueryServer(), am.keeper) - // This is to register /cosmos.auth.v1beta1.Query routes used by cosmjs libraries basetypes.RegisterQueryServer(cfg.QueryServer(), am.basekeeper) } diff --git a/x/dclauth/module_simulation.go b/x/dclauth/module_simulation.go index 35573f3bc..21a63f9c8 100644 --- a/x/dclauth/module_simulation.go +++ b/x/dclauth/module_simulation.go @@ -6,7 +6,7 @@ import ( "math/rand" "github.com/cosmos/cosmos-sdk/baseapp" - simappparams "github.com/cosmos/cosmos-sdk/simapp/params" + simappparams 
"cosmossdk.io/simapp/params" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/types/module" simtypes "github.com/cosmos/cosmos-sdk/types/simulation" diff --git a/x/dclauth/simulation/approve_add_account.go b/x/dclauth/simulation/approve_add_account.go index 13565cb4e..8accab262 100644 --- a/x/dclauth/simulation/approve_add_account.go +++ b/x/dclauth/simulation/approve_add_account.go @@ -6,13 +6,10 @@ import ( "github.com/cosmos/cosmos-sdk/baseapp" sdk "github.com/cosmos/cosmos-sdk/types" simtypes "github.com/cosmos/cosmos-sdk/types/simulation" - "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" ) -func SimulateMsgApproveAddAccount( - k keeper.Keeper, -) simtypes.Operation { +func SimulateMsgApproveAddAccount() simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { simAccount, _ := simtypes.RandomAcc(r, accs) diff --git a/x/dclauth/simulation/approve_revoke_account.go b/x/dclauth/simulation/approve_revoke_account.go index 2ddef7249..aa858ab62 100644 --- a/x/dclauth/simulation/approve_revoke_account.go +++ b/x/dclauth/simulation/approve_revoke_account.go @@ -6,13 +6,10 @@ import ( "github.com/cosmos/cosmos-sdk/baseapp" sdk "github.com/cosmos/cosmos-sdk/types" simtypes "github.com/cosmos/cosmos-sdk/types/simulation" - "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" ) -func SimulateMsgApproveRevokeAccount( - k keeper.Keeper, -) simtypes.Operation { +func SimulateMsgApproveRevokeAccount() simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { simAccount, _ := simtypes.RandomAcc(r, 
accs) diff --git a/x/dclauth/simulation/propose_add_account.go b/x/dclauth/simulation/propose_add_account.go index 2ecc19150..4bd9f2f43 100644 --- a/x/dclauth/simulation/propose_add_account.go +++ b/x/dclauth/simulation/propose_add_account.go @@ -6,13 +6,10 @@ import ( "github.com/cosmos/cosmos-sdk/baseapp" sdk "github.com/cosmos/cosmos-sdk/types" simtypes "github.com/cosmos/cosmos-sdk/types/simulation" - "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" ) -func SimulateMsgProposeAddAccount( - k keeper.Keeper, -) simtypes.Operation { +func SimulateMsgProposeAddAccount() simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { simAccount, _ := simtypes.RandomAcc(r, accs) diff --git a/x/dclauth/simulation/propose_revoke_account.go b/x/dclauth/simulation/propose_revoke_account.go index 8709493ae..be7113ea0 100644 --- a/x/dclauth/simulation/propose_revoke_account.go +++ b/x/dclauth/simulation/propose_revoke_account.go @@ -11,7 +11,7 @@ import ( ) func SimulateMsgProposeRevokeAccount( - k keeper.Keeper, + _ keeper.Keeper, ) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { diff --git a/x/dclauth/simulation/reject_add_account.go b/x/dclauth/simulation/reject_add_account.go index 9d478b92f..f8d36a317 100644 --- a/x/dclauth/simulation/reject_add_account.go +++ b/x/dclauth/simulation/reject_add_account.go @@ -6,13 +6,10 @@ import ( "github.com/cosmos/cosmos-sdk/baseapp" sdk "github.com/cosmos/cosmos-sdk/types" simtypes "github.com/cosmos/cosmos-sdk/types/simulation" - "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/keeper" 
"github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" ) -func SimulateMsgRejectAddAccount( - k keeper.Keeper, -) simtypes.Operation { +func SimulateMsgRejectAddAccount() simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { simAccount, _ := simtypes.RandomAcc(r, accs) diff --git a/x/dclauth/types/account.go b/x/dclauth/types/account.go index 2ee1693a8..a63eefc92 100644 --- a/x/dclauth/types/account.go +++ b/x/dclauth/types/account.go @@ -17,6 +17,7 @@ package types import ( "encoding/json" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" @@ -47,7 +48,7 @@ func (role AccountRole) Validate() error { } } - return sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, "Invalid Account Role: %v. Supported roles: [%v]", role, Roles) + return errors.Wrapf(sdkerrors.ErrUnknownRequest, "Invalid Account Role: %v. Supported roles: [%v]", role, Roles) } /* @@ -251,7 +252,7 @@ func (revoc PendingAccountRevocation) String() string { // Validate checks for errors on the vesting and module account parameters. func (revoc PendingAccountRevocation) Validate() error { if revoc.Address == "" { - return sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, + return errors.Wrapf(sdkerrors.ErrUnknownRequest, "Invalid Pending Account Revocation: Value: %s. Error: Missing Address", revoc.Address, ) } diff --git a/x/dclauth/types/account.pb.go b/x/dclauth/types/account.pb.go index cf99f42ad..1af0124b4 100644 --- a/x/dclauth/types/account.pb.go +++ b/x/dclauth/types/account.pb.go @@ -1,13 +1,13 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: dclauth/account.proto +// source: zigbeealliance/distributedcomplianceledger/dclauth/account.proto package types import ( fmt "fmt" types "github.com/cosmos/cosmos-sdk/x/auth/types" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" types1 "github.com/zigbee-alliance/distributed-compliance-ledger/x/common/types" io "io" math "math" @@ -40,7 +40,7 @@ type Account struct { func (m *Account) Reset() { *m = Account{} } func (*Account) ProtoMessage() {} func (*Account) Descriptor() ([]byte, []int) { - return fileDescriptor_3a2d3e1e8208016c, []int{0} + return fileDescriptor_b313ddcde3a636dd, []int{0} } func (m *Account) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -73,36 +73,39 @@ func init() { proto.RegisterType((*Account)(nil), "zigbeealliance.distributedcomplianceledger.dclauth.Account") } -func init() { proto.RegisterFile("dclauth/account.proto", fileDescriptor_3a2d3e1e8208016c) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/dclauth/account.proto", fileDescriptor_b313ddcde3a636dd) +} -var fileDescriptor_3a2d3e1e8208016c = []byte{ - // 412 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x92, 0xcf, 0x8a, 0xd4, 0x30, - 0x1c, 0xc7, 0x5b, 0xbb, 0xb3, 0x7f, 0x52, 0x41, 0xa8, 0x0a, 0x75, 0x0e, 0x6d, 0x11, 0x84, 0x5e, - 0x26, 0x61, 0x46, 0x10, 0xf4, 0x20, 0x58, 0x16, 0x74, 0xae, 0x15, 0x11, 0x3c, 0xb8, 0x24, 0xe9, - 0x8f, 0x6e, 0xa5, 0x6d, 0x4a, 0x92, 0x0e, 0xea, 0x13, 0x78, 0xf4, 0xe8, 0x71, 0xdf, 0xc0, 0xd7, - 0xf0, 0x38, 0x47, 0x4f, 0x83, 0xcc, 0xbc, 0x85, 0x27, 0x69, 0xd3, 0x3a, 0x73, 0x1d, 0xf6, 0xd6, - 0xe4, 0x4b, 0x3e, 0x9f, 0x6f, 0xf3, 0x0b, 0x7a, 0x98, 0xf1, 0x92, 0xb6, 0xfa, 0x9a, 0x50, 0xce, - 0x45, 0x5b, 0x6b, 0xdc, 0x48, 0xa1, 0x85, 0xb7, 0xf8, 0x5a, 0xe4, 0x0c, 0x80, 0x96, 0x65, 0x41, - 0x6b, 0x0e, 0x38, 0x2b, 0x94, 0x96, 0x05, 
0x6b, 0x35, 0x64, 0x5c, 0x54, 0x8d, 0xd9, 0x2d, 0x21, - 0xcb, 0x41, 0xe2, 0x81, 0x30, 0x7d, 0x90, 0x8b, 0x5c, 0xf4, 0xc7, 0x49, 0xf7, 0x65, 0x48, 0xd3, - 0x80, 0x0b, 0x55, 0x09, 0x45, 0x7a, 0xc9, 0x6a, 0xce, 0x40, 0xd3, 0x79, 0xbf, 0x18, 0xf2, 0x47, - 0x5c, 0x54, 0x95, 0xa8, 0x49, 0x5b, 0xd4, 0x7a, 0xfe, 0xec, 0x4a, 0xd2, 0x3a, 0x87, 0x21, 0xba, - 0x3f, 0x76, 0xcb, 0x25, 0x1d, 0x9b, 0x3d, 0xfe, 0xe9, 0xa0, 0xb3, 0x57, 0xa6, 0xab, 0xb7, 0x44, - 0x77, 0x19, 0x55, 0x70, 0x35, 0x74, 0xf7, 0xed, 0xc8, 0x8e, 0xdd, 0x45, 0x84, 0x8d, 0x12, 0xf7, - 0x96, 0x41, 0x89, 0x13, 0xaa, 0x60, 0x38, 0x97, 0x9c, 0xac, 0x37, 0xa1, 0x9d, 0xba, 0x6c, 0xbf, - 0xe5, 0x3d, 0x41, 0x13, 0x29, 0x4a, 0x50, 0xfe, 0x9d, 0xc8, 0x89, 0x2f, 0x92, 0x7b, 0x7f, 0x37, - 0xa1, 0x3b, 0x64, 0xa9, 0x28, 0x21, 0x35, 0xa9, 0xf7, 0x1e, 0x5d, 0xd0, 0xa6, 0x91, 0x62, 0x45, - 0x4b, 0xe5, 0x3b, 0x91, 0x13, 0xbb, 0x8b, 0xe7, 0xf8, 0xf8, 0xbb, 0xc2, 0xaf, 0xbb, 0x3f, 0x4a, - 0xf7, 0x2c, 0x6f, 0x8a, 0xce, 0x57, 0x50, 0x67, 0x42, 0x2e, 0x2f, 0xfd, 0x93, 0xc8, 0x8e, 0x27, - 0xe9, 0xff, 0xb5, 0xf7, 0x16, 0x9d, 0x49, 0xf8, 0x04, 0x5c, 0x2b, 0x7f, 0x72, 0x5b, 0xe5, 0x48, - 0xf2, 0x3e, 0x22, 0xd4, 0x48, 0x91, 0xb5, 0x5c, 0x2f, 0x2f, 0x95, 0x7f, 0xda, 0x73, 0x5f, 0x1e, - 0xc3, 0x35, 0x73, 0xc3, 0xef, 0xfa, 0xb9, 0xa5, 0xdd, 0xd8, 0xd2, 0x03, 0xe2, 0x8b, 0xf3, 0x6f, - 0x37, 0xa1, 0xf5, 0xe3, 0x26, 0xb4, 0x12, 0xf6, 0x6b, 0x1b, 0xd8, 0xeb, 0x6d, 0x60, 0xff, 0xd9, - 0x06, 0xf6, 0xf7, 0x5d, 0x60, 0xad, 0x77, 0x81, 0xf5, 0x7b, 0x17, 0x58, 0x1f, 0xde, 0xe4, 0x85, - 0xbe, 0x6e, 0x59, 0x87, 0x22, 0xc6, 0x3c, 0x1b, 0xd5, 0xe4, 0x40, 0x3d, 0xdb, 0xbb, 0x67, 0x46, - 0x4e, 0x3e, 0x93, 0xf1, 0x6d, 0xe8, 0x2f, 0x0d, 0x28, 0x76, 0xda, 0x3f, 0x8e, 0xa7, 0xff, 0x02, - 0x00, 0x00, 0xff, 0xff, 0xf5, 0x78, 0x0b, 0x74, 0xcf, 0x02, 0x00, 0x00, +var fileDescriptor_b313ddcde3a636dd = []byte{ + // 420 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x92, 0x4f, 0xeb, 0xd3, 0x30, + 0x1c, 
0xc6, 0x5b, 0xfb, 0xdb, 0xef, 0x4f, 0x2a, 0x08, 0xc5, 0x43, 0xd9, 0xa1, 0x2d, 0x82, 0xd0, + 0xcb, 0x52, 0x36, 0x41, 0xd0, 0xc3, 0xd0, 0x32, 0xd0, 0x5d, 0x2b, 0x22, 0x78, 0x70, 0x24, 0xe9, + 0x97, 0xae, 0xd2, 0x36, 0x25, 0x49, 0x87, 0xfa, 0x0a, 0x3c, 0x7a, 0xf4, 0xb8, 0x77, 0xe0, 0xdb, + 0xf0, 0xb8, 0xa3, 0xa7, 0x21, 0xdb, 0xbb, 0xf0, 0x24, 0x6d, 0xb3, 0x4d, 0x41, 0x84, 0xe9, 0x2d, + 0xc9, 0x97, 0x7c, 0x9e, 0x3c, 0x79, 0x1e, 0xf4, 0xe4, 0x43, 0x9e, 0x51, 0x00, 0x52, 0x14, 0x39, + 0xa9, 0x18, 0x44, 0x69, 0x2e, 0x95, 0xc8, 0x69, 0xa3, 0x20, 0x65, 0xbc, 0xac, 0xfb, 0xd3, 0x02, + 0xd2, 0x0c, 0x44, 0x94, 0xb2, 0x82, 0x34, 0x6a, 0x19, 0x11, 0xc6, 0x78, 0x53, 0x29, 0x5c, 0x0b, + 0xae, 0xb8, 0x33, 0xf9, 0x9d, 0x80, 0xff, 0x42, 0xc0, 0x9a, 0x30, 0xbc, 0x9b, 0xf1, 0x8c, 0x77, + 0xd7, 0xa3, 0x76, 0xd5, 0x93, 0x86, 0x1e, 0xe3, 0xb2, 0xe4, 0x32, 0xea, 0x44, 0x56, 0x63, 0x0a, + 0x8a, 0x8c, 0xbb, 0x8d, 0x9e, 0xcf, 0xce, 0x78, 0x2b, 0xe3, 0x65, 0xc9, 0xab, 0xa8, 0xc9, 0x2b, + 0x35, 0x7e, 0xb8, 0x10, 0xa4, 0xca, 0x40, 0x53, 0xa6, 0xff, 0xe0, 0x38, 0x13, 0xe4, 0xe0, 0xf7, + 0xde, 0x17, 0x0b, 0x5d, 0x3d, 0xed, 0x7f, 0xc0, 0x99, 0xa3, 0xdb, 0x94, 0x48, 0x58, 0xe8, 0x1f, + 0x71, 0xcd, 0xc0, 0x0c, 0xed, 0x49, 0x80, 0x7b, 0x23, 0xb8, 0x7b, 0xbb, 0x36, 0x82, 0x63, 0x22, + 0x41, 0xdf, 0x8b, 0x2f, 0x36, 0x5b, 0xdf, 0x4c, 0x6c, 0x7a, 0x3a, 0x72, 0xee, 0xa3, 0x81, 0xe0, + 0x05, 0x48, 0xf7, 0x56, 0x60, 0x85, 0x37, 0xf1, 0x9d, 0x1f, 0x5b, 0xdf, 0xd6, 0xb3, 0x84, 0x17, + 0x90, 0xf4, 0x53, 0xe7, 0x15, 0xba, 0x21, 0x75, 0x2d, 0xf8, 0x8a, 0x14, 0xd2, 0xb5, 0x02, 0x2b, + 0xb4, 0x27, 0x8f, 0xf0, 0xf9, 0x09, 0xe0, 0x67, 0xad, 0xa3, 0xe4, 0xc4, 0x72, 0x86, 0xe8, 0x7a, + 0x05, 0x55, 0xca, 0xc5, 0x7c, 0xe6, 0x5e, 0x04, 0x66, 0x38, 0x48, 0x8e, 0x7b, 0xe7, 0x05, 0xba, + 0x12, 0xf0, 0x16, 0x98, 0x92, 0xee, 0xe0, 0x7f, 0x25, 0x0f, 0x24, 0xe7, 0x0d, 0x42, 0xb5, 0xe0, + 0x69, 0xc3, 0xd4, 0x7c, 0x26, 0xdd, 0xcb, 0x8e, 0x3b, 0x3d, 0x87, 0xdb, 0x47, 0x8c, 0x5f, 0x76, + 0x11, 0x27, 0x6d, 0xc2, 0xc9, 0x2f, 0xc4, 
0xc7, 0xd7, 0x1f, 0xd7, 0xbe, 0xf1, 0x79, 0xed, 0x1b, + 0x31, 0xfd, 0xba, 0xf3, 0xcc, 0xcd, 0xce, 0x33, 0xbf, 0xef, 0x3c, 0xf3, 0xd3, 0xde, 0x33, 0x36, + 0x7b, 0xcf, 0xf8, 0xb6, 0xf7, 0x8c, 0xd7, 0xcf, 0xb3, 0x5c, 0x2d, 0x1b, 0xda, 0xa2, 0xa2, 0x5e, + 0x79, 0xf4, 0xa7, 0x5e, 0x8c, 0x4e, 0xda, 0x23, 0xdd, 0x8c, 0x77, 0xc7, 0x6e, 0xa8, 0xf7, 0x35, + 0x48, 0x7a, 0xd9, 0x95, 0xe3, 0xc1, 0xcf, 0x00, 0x00, 0x00, 0xff, 0xff, 0xce, 0x0c, 0xab, 0xc9, + 0x50, 0x03, 0x00, 0x00, } func (m *Account) Marshal() (dAtA []byte, err error) { diff --git a/x/dclauth/types/account_stat.pb.go b/x/dclauth/types/account_stat.pb.go index 1dbfc7a41..2d0f2506d 100644 --- a/x/dclauth/types/account_stat.pb.go +++ b/x/dclauth/types/account_stat.pb.go @@ -1,11 +1,11 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: dclauth/account_stat.proto +// source: zigbeealliance/distributedcomplianceledger/dclauth/account_stat.proto package types import ( fmt "fmt" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -30,7 +30,7 @@ func (m *AccountStat) Reset() { *m = AccountStat{} } func (m *AccountStat) String() string { return proto.CompactTextString(m) } func (*AccountStat) ProtoMessage() {} func (*AccountStat) Descriptor() ([]byte, []int) { - return fileDescriptor_2d7f051f29b648b0, []int{0} + return fileDescriptor_3193f134e215a09e, []int{0} } func (m *AccountStat) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -70,23 +70,25 @@ func init() { proto.RegisterType((*AccountStat)(nil), "zigbeealliance.distributedcomplianceledger.dclauth.AccountStat") } -func init() { proto.RegisterFile("dclauth/account_stat.proto", fileDescriptor_2d7f051f29b648b0) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/dclauth/account_stat.proto", fileDescriptor_3193f134e215a09e) +} -var fileDescriptor_2d7f051f29b648b0 = []byte{ - // 196 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4a, 0x49, 0xce, 0x49, - 0x2c, 0x2d, 0xc9, 0xd0, 0x4f, 0x4c, 0x4e, 0xce, 0x2f, 0xcd, 0x2b, 0x89, 0x2f, 0x2e, 0x49, 0x2c, - 0xd1, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x32, 0xaa, 0xca, 0x4c, 0x4f, 0x4a, 0x4d, 0x4d, 0xcc, - 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4b, 0xc9, 0x2c, 0x2e, 0x29, 0xca, 0x4c, 0x2a, 0x2d, - 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, 0xa7, 0x16, 0xe9, 0x41, - 0x8d, 0x51, 0x52, 0xe5, 0xe2, 0x76, 0x84, 0x98, 0x14, 0x5c, 0x92, 0x58, 0x22, 0x24, 0xc6, 0xc5, - 0x96, 0x57, 0x9a, 0x9b, 0x94, 0x5a, 0x24, 0xc1, 0xa8, 0xc0, 0xa8, 0xc1, 0x12, 0x04, 0xe5, 0x39, - 0x25, 0x9d, 0x78, 0x24, 0xc7, 0x78, 0xe1, 0x91, 0x1c, 0xe3, 0x83, 0x47, 0x72, 0x8c, 0x13, 0x1e, - 0xcb, 0x31, 0x5c, 0x78, 0x2c, 0xc7, 0x70, 0xe3, 0xb1, 0x1c, 0x43, 0x94, 0x47, 0x7a, 0x66, 0x49, - 0x46, 0x69, 0x92, 0x5e, 0x72, 0x7e, 0xae, 0x3e, 0xc4, 0x7e, 0x5d, 0x98, 0x03, 0xf4, 0x91, 0x1c, - 0xa0, 0x8b, 0x70, 0x81, 0x2e, 0xc4, 0x09, 0xfa, 0x15, 0xfa, 0x30, 0xbf, 0x94, 0x54, 0x16, 0xa4, - 0x16, 0x27, 0xb1, 0x81, 0x7d, 0x61, 0x0c, 0x08, 0x00, 0x00, 0xff, 0xff, 0x54, 0x33, 0x83, 0x86, - 0xe3, 0x00, 0x00, 0x00, +var fileDescriptor_3193f134e215a09e = []byte{ + // 198 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x72, 0xad, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0xa7, 0x24, 0xe7, 0x24, 0x96, 0x96, 0x64, 0xe8, 0x27, 0x26, 0x27, 0xe7, 0x97, + 0xe6, 0x95, 0xc4, 0x17, 0x97, 0x24, 0x96, 0xe8, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0x19, 0xa1, + 0x1a, 0xa3, 0x87, 0xc7, 0x18, 0x3d, 0xa8, 0x31, 0x4a, 0xaa, 0x5c, 0xdc, 0x8e, 0x10, 0x93, 0x82, + 0x4b, 0x12, 0x4b, 0x84, 0xc4, 0xb8, 0xd8, 0xf2, 0x4a, 0x73, 0x93, 0x52, 0x8b, 0x24, 0x18, 0x15, + 0x18, 0x35, 0x58, 0x82, 0xa0, 
0x3c, 0xa7, 0xa4, 0x13, 0x8f, 0xe4, 0x18, 0x2f, 0x3c, 0x92, 0x63, + 0x7c, 0xf0, 0x48, 0x8e, 0x71, 0xc2, 0x63, 0x39, 0x86, 0x0b, 0x8f, 0xe5, 0x18, 0x6e, 0x3c, 0x96, + 0x63, 0x88, 0xf2, 0x48, 0xcf, 0x2c, 0xc9, 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x87, 0xd8, + 0xaf, 0x8b, 0xcd, 0x1f, 0xba, 0x08, 0x17, 0xe8, 0x42, 0x7d, 0x52, 0x01, 0xf7, 0x4b, 0x49, 0x65, + 0x41, 0x6a, 0x71, 0x12, 0x1b, 0xd8, 0x17, 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0xef, 0xc4, + 0xb0, 0x14, 0x0e, 0x01, 0x00, 0x00, } func (m *AccountStat) Marshal() (dAtA []byte, err error) { diff --git a/x/dclauth/types/codec.go b/x/dclauth/types/codec.go index b85b86a5e..e6627e0d3 100644 --- a/x/dclauth/types/codec.go +++ b/x/dclauth/types/codec.go @@ -35,7 +35,7 @@ func RegisterInterfaces(registry cdctypes.InterfaceRegistry) { ) // this line is used by starport scaffolding # 3 - msgservice.RegisterMsgServiceDesc(registry, &_Msg_serviceDesc) + msgservice.RegisterMsgServiceDesc(registry, &_Msg_serviceDesc) //nolint:nosnakecase } var ( diff --git a/x/dclauth/types/errors.go b/x/dclauth/types/errors.go index c3d5e8263..770069046 100644 --- a/x/dclauth/types/errors.go +++ b/x/dclauth/types/errors.go @@ -5,57 +5,57 @@ package types import ( "fmt" - sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "cosmossdk.io/errors" ) // x/dclauth module sentinel errors. 
var ( - AccountAlreadyExists = sdkerrors.Register(ModuleName, 101, "account already exists") - AccountDoesNotExist = sdkerrors.Register(ModuleName, 102, "account not found") - PendingAccountAlreadyExists = sdkerrors.Register(ModuleName, 103, "pending account already exists") - PendingAccountDoesNotExist = sdkerrors.Register(ModuleName, 104, "pending account not found") - PendingAccountRevocationAlreadyExists = sdkerrors.Register(ModuleName, 105, "pending account revocation already exists") - PendingAccountRevocationDoesNotExist = sdkerrors.Register(ModuleName, 106, "pending account revocation not found") - MissingVendorIDForVendorAccount = sdkerrors.Register(ModuleName, 107, "no Vendor ID provided") - MissingRoles = sdkerrors.Register(ModuleName, 108, "no roles provided") + AccountAlreadyExists = errors.Register(ModuleName, 101, "account already exists") + AccountDoesNotExist = errors.Register(ModuleName, 102, "account not found") + PendingAccountAlreadyExists = errors.Register(ModuleName, 103, "pending account already exists") + PendingAccountDoesNotExist = errors.Register(ModuleName, 104, "pending account not found") + PendingAccountRevocationAlreadyExists = errors.Register(ModuleName, 105, "pending account revocation already exists") + PendingAccountRevocationDoesNotExist = errors.Register(ModuleName, 106, "pending account revocation not found") + MissingVendorIDForVendorAccount = errors.Register(ModuleName, 107, "no Vendor ID provided") + MissingRoles = errors.Register(ModuleName, 108, "no roles provided") ) func ErrAccountAlreadyExists(address interface{}) error { - return sdkerrors.Wrapf(AccountAlreadyExists, + return errors.Wrapf(AccountAlreadyExists, fmt.Sprintf("Account associated with the address=%v already exists on the ledger", address)) } func ErrAccountDoesNotExist(address interface{}) error { - return sdkerrors.Wrapf(AccountDoesNotExist, + return errors.Wrapf(AccountDoesNotExist, fmt.Sprintf("No account associated with the address=%v on the ledger", 
address)) } func ErrPendingAccountAlreadyExists(address interface{}) error { - return sdkerrors.Wrapf(PendingAccountAlreadyExists, + return errors.Wrapf(PendingAccountAlreadyExists, fmt.Sprintf("Pending account associated with the address=%v already exists on the ledger", address)) } func ErrPendingAccountDoesNotExist(address interface{}) error { - return sdkerrors.Wrapf(PendingAccountDoesNotExist, + return errors.Wrapf(PendingAccountDoesNotExist, fmt.Sprintf("No pending account associated with the address=%v on the ledger", address)) } func ErrPendingAccountRevocationAlreadyExists(address interface{}) error { - return sdkerrors.Wrapf(PendingAccountRevocationAlreadyExists, + return errors.Wrapf(PendingAccountRevocationAlreadyExists, fmt.Sprintf("Pending account revocation associated with the address=%v already exists on the ledger", address)) } func ErrPendingAccountRevocationDoesNotExist(address interface{}) error { - return sdkerrors.Wrapf(PendingAccountRevocationDoesNotExist, + return errors.Wrapf(PendingAccountRevocationDoesNotExist, fmt.Sprintf("No pending account revocation associated with the address=%v on the ledger", address)) } func ErrMissingVendorIDForVendorAccount() error { - return sdkerrors.Wrapf(MissingVendorIDForVendorAccount, + return errors.Wrapf(MissingVendorIDForVendorAccount, "No Vendor ID is provided in the Vendor Role for the new account") } func ErrMissingRoles() error { - return sdkerrors.Wrapf(MissingRoles, + return errors.Wrapf(MissingRoles, "No roles provided") } diff --git a/x/dclauth/types/genesis.pb.go b/x/dclauth/types/genesis.pb.go index e4cff48bc..abfe23fbe 100644 --- a/x/dclauth/types/genesis.pb.go +++ b/x/dclauth/types/genesis.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: dclauth/genesis.proto +// source: zigbeealliance/distributedcomplianceledger/dclauth/genesis.proto package types import ( fmt "fmt" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -37,7 +37,7 @@ func (m *GenesisState) Reset() { *m = GenesisState{} } func (m *GenesisState) String() string { return proto.CompactTextString(m) } func (*GenesisState) ProtoMessage() {} func (*GenesisState) Descriptor() ([]byte, []int) { - return fileDescriptor_b9b33ac10a70c596, []int{0} + return fileDescriptor_5b66b543e193054f, []int{0} } func (m *GenesisState) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -112,35 +112,37 @@ func init() { proto.RegisterType((*GenesisState)(nil), "zigbeealliance.distributedcomplianceledger.dclauth.GenesisState") } -func init() { proto.RegisterFile("dclauth/genesis.proto", fileDescriptor_b9b33ac10a70c596) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/dclauth/genesis.proto", fileDescriptor_5b66b543e193054f) +} -var fileDescriptor_b9b33ac10a70c596 = []byte{ - // 394 bytes of a gzipped FileDescriptorProto +var fileDescriptor_5b66b543e193054f = []byte{ + // 399 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x93, 0x3f, 0x4f, 0xc2, 0x40, - 0x18, 0xc6, 0x5b, 0xf9, 0x33, 0x14, 0xa7, 0xaa, 0x09, 0x21, 0x5a, 0x89, 0x13, 0x0b, 0x6d, 0x82, - 0xa3, 0x83, 0x01, 0x07, 0x1d, 0x18, 0x4c, 0xdd, 0x5c, 0xc8, 0xf5, 0xfa, 0xe6, 0x38, 0x2d, 0xbd, - 0xa6, 0xbd, 0x1a, 0xd4, 0x2f, 0xe1, 0xe4, 0x67, 0x62, 0x64, 0x74, 0x32, 0x04, 0xbe, 0x88, 0x69, - 0xef, 0x0a, 0x2d, 0xa0, 0x89, 0xb0, 0x35, 0xef, 0xdb, 0xfe, 0x7e, 0x4f, 0x9e, 0xde, 0x69, 0x27, - 0x2e, 0xf6, 0x50, 0xcc, 0x87, 0x16, 0x01, 0x1f, 0x22, 0x1a, 0x99, 0x41, 0xc8, 0x38, 0xd3, 0x3b, - 0x6f, 0x94, 0x38, 0x00, 0xc8, 0xf3, 0x28, 0xf2, 0x31, 0x98, 
0x2e, 0x8d, 0x78, 0x48, 0x9d, 0x98, - 0x83, 0x8b, 0xd9, 0x28, 0x10, 0x53, 0x0f, 0x5c, 0x02, 0xa1, 0x29, 0x09, 0x8d, 0x25, 0x0a, 0x61, - 0xcc, 0x62, 0x9f, 0x0b, 0x54, 0xe3, 0x2c, 0x1b, 0x07, 0xe0, 0xbb, 0xd4, 0x27, 0x83, 0xe2, 0xba, - 0xf5, 0xcb, 0x7a, 0x10, 0xc2, 0x0b, 0xc3, 0x88, 0x53, 0xe6, 0xcb, 0x37, 0x1b, 0x6b, 0xfc, 0x41, - 0xc4, 0xd1, 0x86, 0x24, 0xf9, 0xea, 0x19, 0xdc, 0x35, 0x89, 0xb1, 0x5a, 0x3f, 0x01, 0xe6, 0x1b, - 0xfb, 0x63, 0xc2, 0x08, 0x4b, 0x1f, 0xad, 0xe4, 0x49, 0x4c, 0x2f, 0x66, 0x15, 0xed, 0xf0, 0x56, - 0xd4, 0xf2, 0xc0, 0x11, 0x07, 0x1d, 0x6b, 0x35, 0xf9, 0x5d, 0x9f, 0x46, 0xbc, 0xae, 0x36, 0x4b, - 0xad, 0x5a, 0xe7, 0xca, 0xfc, 0x7f, 0x57, 0x66, 0x57, 0x60, 0x7a, 0xe5, 0xc9, 0xf7, 0xb9, 0x62, - 0xe7, 0xa9, 0xfa, 0x58, 0xd3, 0x65, 0x15, 0xdd, 0x9c, 0xeb, 0x20, 0x75, 0xf5, 0x76, 0x71, 0xdd, - 0x17, 0x68, 0x52, 0xb9, 0xc5, 0xa1, 0x7f, 0xaa, 0xda, 0x69, 0x71, 0x6c, 0x2f, 0xff, 0x41, 0x1a, - 0xa2, 0x94, 0x86, 0xe8, 0xef, 0x1f, 0x62, 0xc5, 0x95, 0x71, 0xfe, 0xf4, 0xea, 0x68, 0xd9, 0x7b, - 0xf2, 0x1f, 0xea, 0xe5, 0xa6, 0xda, 0xaa, 0x75, 0xae, 0xf7, 0xe8, 0x3d, 0xc1, 0xd8, 0x79, 0x66, - 0xd2, 0xba, 0x3c, 0x3a, 0xf9, 0xd6, 0x2b, 0xbb, 0xb7, 0x6e, 0x17, 0x68, 0x59, 0xeb, 0x9b, 0x0e, - 0xfd, 0x5d, 0x3b, 0xca, 0x4e, 0x65, 0x5e, 0x5d, 0x4d, 0xd5, 0x37, 0xbb, 0xa9, 0x0b, 0x38, 0xe9, - 0xde, 0x66, 0xe9, 0x39, 0x93, 0xb9, 0xa1, 0x4e, 0xe7, 0x86, 0x3a, 0x9b, 0x1b, 0xea, 0xc7, 0xc2, - 0x50, 0xa6, 0x0b, 0x43, 0xf9, 0x5a, 0x18, 0xca, 0xe3, 0x1d, 0xa1, 0x7c, 0x18, 0x3b, 0x26, 0x66, - 0x23, 0x4b, 0x64, 0x68, 0x67, 0x21, 0xac, 0x5c, 0x88, 0xf6, 0x2a, 0x45, 0x5b, 0xc4, 0xb0, 0xc6, - 0x56, 0x76, 0xdb, 0xf8, 0x6b, 0x00, 0x91, 0x53, 0x4d, 0x6f, 0xd3, 0xe5, 0x4f, 0x00, 0x00, 0x00, - 0xff, 0xff, 0x61, 0x1d, 0x73, 0x1c, 0x6b, 0x04, 0x00, 0x00, + 0x18, 0xc6, 0x5b, 0xf9, 0x33, 0x14, 0xa7, 0xea, 0x40, 0x88, 0xa9, 0xc4, 0x89, 0x85, 0x36, 0xc1, + 0xd1, 0x41, 0xc1, 0x18, 0x31, 0x61, 0x30, 0x65, 0x73, 0x21, 0xd7, 0xeb, 0x9b, 0x72, 0x5a, 0x7a, + 0x4d, 0x7b, 0x35, 0xa8, 0x5f, 0xc2, 
0xc9, 0xcf, 0xc4, 0xc8, 0xe8, 0x64, 0x08, 0x7c, 0x11, 0xd3, + 0xf6, 0x80, 0x56, 0x08, 0x89, 0xd7, 0xad, 0xb9, 0xa4, 0xbf, 0xdf, 0x73, 0xcf, 0xbd, 0xaf, 0x72, + 0xf3, 0x4e, 0x1c, 0x0b, 0x00, 0xb9, 0x2e, 0x41, 0x1e, 0x06, 0xc3, 0x26, 0x21, 0x0b, 0x88, 0x15, + 0x31, 0xb0, 0x31, 0x9d, 0xf8, 0xe9, 0xa9, 0x0b, 0xb6, 0x03, 0x81, 0x61, 0x63, 0x17, 0x45, 0x6c, + 0x6c, 0x38, 0xe0, 0x41, 0x48, 0x42, 0xdd, 0x0f, 0x28, 0xa3, 0x6a, 0x27, 0x4f, 0xd0, 0x0f, 0x10, + 0x74, 0x4e, 0x68, 0x88, 0x58, 0x11, 0xc6, 0x34, 0xf2, 0x58, 0x6a, 0x6d, 0xf4, 0x05, 0x08, 0x3e, + 0x78, 0x36, 0xf1, 0x9c, 0x51, 0x9e, 0x34, 0x2c, 0x4e, 0x1a, 0x05, 0xf0, 0x4a, 0x31, 0x62, 0x84, + 0x7a, 0x1c, 0x7a, 0x27, 0x7e, 0xc1, 0x51, 0xc8, 0x50, 0x91, 0x5b, 0xc6, 0x59, 0x5e, 0xc0, 0xfe, + 0x73, 0xcb, 0x07, 0x21, 0xd2, 0x33, 0x60, 0xb6, 0x83, 0x3a, 0x75, 0xa8, 0x43, 0x93, 0x4f, 0x23, + 0xfe, 0x4a, 0x4f, 0x2f, 0x16, 0x15, 0xe5, 0xf8, 0x3e, 0x1d, 0x8c, 0x21, 0x43, 0x0c, 0x54, 0xac, + 0xd4, 0xf8, 0x7f, 0x03, 0x12, 0xb2, 0xba, 0xdc, 0x2c, 0xb5, 0x6a, 0x9d, 0x2b, 0xfd, 0xff, 0xd3, + 0xa2, 0x77, 0x53, 0x4c, 0xaf, 0x3c, 0xfb, 0x39, 0x97, 0xcc, 0x2c, 0x55, 0x9d, 0x2a, 0x2a, 0x7f, + 0x8b, 0x6e, 0xc6, 0x75, 0x94, 0xb8, 0x7a, 0x22, 0xae, 0xc7, 0x1c, 0x8d, 0x2b, 0xf7, 0x38, 0xd4, + 0x2f, 0x59, 0x39, 0xcb, 0x1f, 0x9b, 0x9b, 0x21, 0x48, 0x42, 0x94, 0x92, 0x10, 0x83, 0xe2, 0x21, + 0xb6, 0x5c, 0x1e, 0xe7, 0xa0, 0x57, 0x45, 0x9b, 0xde, 0xe3, 0x77, 0xa8, 0x97, 0x9b, 0x72, 0xab, + 0xd6, 0xb9, 0x2e, 0xd0, 0x7b, 0x8c, 0x31, 0xb3, 0xcc, 0xb8, 0x75, 0x3e, 0x65, 0xd9, 0xd6, 0x2b, + 0xe2, 0xad, 0x9b, 0x39, 0xda, 0xba, 0xf5, 0x5d, 0x87, 0xfa, 0xa1, 0x9c, 0xac, 0xa7, 0x32, 0xab, + 0xae, 0x26, 0xea, 0x5b, 0x31, 0x75, 0x0e, 0xc7, 0xdd, 0xfb, 0x2c, 0x3d, 0x6b, 0xb6, 0xd4, 0xe4, + 0xf9, 0x52, 0x93, 0x17, 0x4b, 0x4d, 0xfe, 0x5c, 0x69, 0xd2, 0x7c, 0xa5, 0x49, 0xdf, 0x2b, 0x4d, + 0x7a, 0xea, 0x3b, 0x84, 0x8d, 0x23, 0x4b, 0xc7, 0x74, 0x62, 0xa4, 0x19, 0xda, 0xfb, 0x36, 0xad, + 0xbd, 0x4d, 0xd1, 0xe6, 0xbb, 0x36, 0xdd, 0x6c, 0x1b, 0x7b, 0xf3, 0x21, 
0xb4, 0xaa, 0xc9, 0x36, + 0x5d, 0xfe, 0x06, 0x00, 0x00, 0xff, 0xff, 0x19, 0xda, 0xde, 0x2c, 0x98, 0x05, 0x00, 0x00, } func (m *GenesisState) Marshal() (dAtA []byte, err error) { diff --git a/x/dclauth/types/grant.pb.go b/x/dclauth/types/grant.pb.go index 242f50e4a..20e513f18 100644 --- a/x/dclauth/types/grant.pb.go +++ b/x/dclauth/types/grant.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: dclauth/grant.proto +// source: zigbeealliance/distributedcomplianceledger/dclauth/grant.proto package types import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -33,7 +33,7 @@ func (m *Grant) Reset() { *m = Grant{} } func (m *Grant) String() string { return proto.CompactTextString(m) } func (*Grant) ProtoMessage() {} func (*Grant) Descriptor() ([]byte, []int) { - return fileDescriptor_9332917d54e22b21, []int{0} + return fileDescriptor_86ef6cca43825bdb, []int{0} } func (m *Grant) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -87,26 +87,28 @@ func init() { proto.RegisterType((*Grant)(nil), "zigbeealliance.distributedcomplianceledger.dclauth.Grant") } -func init() { proto.RegisterFile("dclauth/grant.proto", fileDescriptor_9332917d54e22b21) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/dclauth/grant.proto", fileDescriptor_86ef6cca43825bdb) +} -var fileDescriptor_9332917d54e22b21 = []byte{ - // 249 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x4c, 0x90, 0x3d, 0x4e, 0xc3, 0x30, - 0x18, 0x86, 0x63, 0xca, 0x8f, 0xc8, 0x18, 0x18, 0x02, 0x83, 0x55, 0x31, 0x75, 0x49, 0x2c, 0x95, - 0x13, 0xd0, 0x05, 0xe6, 0xb0, 0xb1, 0x20, 0xff, 0x7c, 0xb8, 0x96, 0x92, 0x38, 0xb2, 0xbf, 0x48, - 0xc0, 0x29, 0x38, 0x0c, 0x87, 0x60, 0xac, 0x98, 0x18, 0x51, 0x72, 0x11, 0x14, 0xbb, 0x11, 0xdd, - 0x5e, 0xbf, 0x7e, 0xfd, 0x48, 0x8f, 0xd3, 0x0b, 
0x25, 0x6b, 0xde, 0xe3, 0x96, 0x69, 0xc7, 0x5b, - 0x2c, 0x3b, 0x67, 0xd1, 0x66, 0xeb, 0x77, 0xa3, 0x05, 0x00, 0xaf, 0x6b, 0xc3, 0x5b, 0x09, 0xa5, - 0x32, 0x1e, 0x9d, 0x11, 0x3d, 0x82, 0x92, 0xb6, 0xe9, 0x62, 0x5b, 0x83, 0xd2, 0xe0, 0xca, 0xfd, - 0xfb, 0xeb, 0x2b, 0x69, 0x7d, 0x63, 0xfd, 0x73, 0x20, 0xb0, 0x78, 0x88, 0xb8, 0x1b, 0x99, 0x9e, - 0xdc, 0x4f, 0xf4, 0x6c, 0x9d, 0x9e, 0x71, 0xa5, 0x1c, 0x78, 0x9f, 0x93, 0x25, 0x59, 0x9d, 0x6f, - 0xf2, 0xef, 0xcf, 0xe2, 0x72, 0xbf, 0xbd, 0x8b, 0x37, 0x8f, 0xe8, 0x4c, 0xab, 0xab, 0x79, 0x98, - 0x65, 0xe9, 0x31, 0x9a, 0x06, 0xf2, 0xa3, 0x25, 0x59, 0x2d, 0xaa, 0x90, 0xa7, 0xce, 0xb4, 0x2f, - 0x36, 0x5f, 0x4c, 0x90, 0x2a, 0xe4, 0x8d, 0xf8, 0x1a, 0x28, 0xd9, 0x0d, 0x94, 0xfc, 0x0e, 0x94, - 0x7c, 0x8c, 0x34, 0xd9, 0x8d, 0x34, 0xf9, 0x19, 0x69, 0xf2, 0xf4, 0xa0, 0x0d, 0x6e, 0x7b, 0x51, - 0x4a, 0xdb, 0xb0, 0x28, 0x56, 0xcc, 0x66, 0xec, 0xc0, 0xac, 0xf8, 0x57, 0x2b, 0xa2, 0x1b, 0x7b, - 0x65, 0xf3, 0xef, 0xe0, 0x5b, 0x07, 0x5e, 0x9c, 0x06, 0x9f, 0xdb, 0xbf, 0x00, 0x00, 0x00, 0xff, - 0xff, 0x63, 0x02, 0x2c, 0x41, 0x35, 0x01, 0x00, 0x00, +var fileDescriptor_86ef6cca43825bdb = []byte{ + // 251 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xb2, 0xab, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0xa7, 0x24, 0xe7, 0x24, 0x96, 0x96, 0x64, 0xe8, 0xa7, 0x17, 0x25, 0xe6, 0x95, + 0xe8, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0x19, 0xa1, 0xea, 0xd7, 0xc3, 0xa3, 0x5f, 0x0f, 0xaa, + 0x5f, 0x4a, 0x32, 0x39, 0xbf, 0x38, 0x37, 0xbf, 0x38, 0x1e, 0x6c, 0x82, 0x3e, 0x84, 0x03, 0x31, + 0x4e, 0x29, 0x99, 0x8b, 0xd5, 0x1d, 0x64, 0xba, 0x90, 0x11, 0x17, 0x7b, 0x62, 0x4a, 0x4a, 0x51, + 0x6a, 0x71, 0xb1, 0x04, 0xa3, 0x02, 0xa3, 0x06, 0xa7, 0x93, 0xc4, 0xa5, 0x2d, 0xba, 0x22, 0x50, + 0xb5, 0x8e, 0x10, 0x99, 0xe0, 
0x92, 0xa2, 0xcc, 0xbc, 0xf4, 0x20, 0x98, 0x42, 0x21, 0x21, 0x2e, + 0x96, 0x92, 0xcc, 0xdc, 0x54, 0x09, 0x26, 0x05, 0x46, 0x0d, 0xe6, 0x20, 0x30, 0x1b, 0x24, 0x96, + 0x99, 0x97, 0x96, 0x2f, 0xc1, 0x0c, 0x32, 0x24, 0x08, 0xcc, 0x76, 0x4a, 0x3a, 0xf1, 0x48, 0x8e, + 0xf1, 0xc2, 0x23, 0x39, 0xc6, 0x07, 0x8f, 0xe4, 0x18, 0x27, 0x3c, 0x96, 0x63, 0xb8, 0xf0, 0x58, + 0x8e, 0xe1, 0xc6, 0x63, 0x39, 0x86, 0x28, 0x8f, 0xf4, 0xcc, 0x92, 0x8c, 0xd2, 0x24, 0xbd, 0xe4, + 0xfc, 0x5c, 0x7d, 0x88, 0xc7, 0x74, 0xb1, 0x85, 0x8c, 0x2e, 0xc2, 0x6b, 0xba, 0xd0, 0xb0, 0xa9, + 0x80, 0x87, 0x4e, 0x49, 0x65, 0x41, 0x6a, 0x71, 0x12, 0x1b, 0xd8, 0x3f, 0xc6, 0x80, 0x00, 0x00, + 0x00, 0xff, 0xff, 0x0f, 0xb6, 0x99, 0x19, 0x60, 0x01, 0x00, 0x00, } func (m *Grant) Marshal() (dAtA []byte, err error) { diff --git a/x/dclauth/types/message_approve_add_account.go b/x/dclauth/types/message_approve_add_account.go index 6316579be..0a6bb391a 100644 --- a/x/dclauth/types/message_approve_add_account.go +++ b/x/dclauth/types/message_approve_add_account.go @@ -3,6 +3,7 @@ package types import ( "time" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" @@ -46,21 +47,21 @@ func (msg *MsgApproveAddAccount) GetSignBytes() []byte { func (msg *MsgApproveAddAccount) ValidateBasic() error { if msg.Address == "" { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Account Address: it cannot be empty") + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Account Address: it cannot be empty") } _, err := sdk.AccAddressFromBech32(msg.Address) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid Account Address: (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid Account Address: (%s)", err) } if msg.Signer == "" { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Signer: it cannot be empty") + return 
errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Signer: it cannot be empty") } _, err = sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid Signer: (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid Signer: (%s)", err) } err = validator.Validate(msg) diff --git a/x/dclauth/types/message_approve_revoke_account.go b/x/dclauth/types/message_approve_revoke_account.go index 47a01edee..8ba9b6d47 100644 --- a/x/dclauth/types/message_approve_revoke_account.go +++ b/x/dclauth/types/message_approve_revoke_account.go @@ -3,6 +3,7 @@ package types import ( "time" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" @@ -46,21 +47,21 @@ func (msg *MsgApproveRevokeAccount) GetSignBytes() []byte { func (msg *MsgApproveRevokeAccount) ValidateBasic() error { if msg.Address == "" { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Account Address: it cannot be empty") + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Account Address: it cannot be empty") } _, err := sdk.AccAddressFromBech32(msg.Address) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid Account Address: (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid Account Address: (%s)", err) } if msg.Signer == "" { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Signer: it cannot be empty") + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Signer: it cannot be empty") } _, err = sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid Signer: (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid Signer: (%s)", err) } err = validator.Validate(msg) diff --git a/x/dclauth/types/message_propose_add_account.go 
b/x/dclauth/types/message_propose_add_account.go index 46f754d1d..328c27d61 100644 --- a/x/dclauth/types/message_propose_add_account.go +++ b/x/dclauth/types/message_propose_add_account.go @@ -5,10 +5,12 @@ import ( "fmt" "time" + "cosmossdk.io/errors" codectypes "github.com/cosmos/cosmos-sdk/codec/types" cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" "github.com/zigbee-alliance/distributed-compliance-ledger/x/common/types" ) @@ -97,34 +99,34 @@ func (msg *MsgProposeAddAccount) GetSignBytes() []byte { func (msg *MsgProposeAddAccount) ValidateBasic() error { if msg.Address == "" { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Account Address: it cannot be empty") + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Account Address: it cannot be empty") } accAddr, err := sdk.AccAddressFromBech32(msg.Address) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Account Address: (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Account Address: (%s)", err) } if msg.Signer == "" { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Signer: it cannot be empty") + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Signer: it cannot be empty") } _, err = sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Signer: (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Signer: (%s)", err) } if msg.PubKey == nil { - return sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, "Invalid PublicKey: it cannot be empty") + return errors.Wrapf(sdkerrors.ErrUnknownRequest, "Invalid PublicKey: it cannot be empty") } pk, err2 := msg.PubKey.GetCachedValue().(cryptotypes.PubKey) if !err2 { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidType, "expecting 
cryptotypes.PubKey for PubKey, got %T", err) + return errors.Wrapf(sdkerrors.ErrInvalidType, "expecting cryptotypes.PubKey for PubKey, got %T", err) } if !bytes.Equal(pk.Address().Bytes(), accAddr.Bytes()) { - return sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, "account address and pubkey address do not match") + return errors.Wrapf(sdkerrors.ErrUnknownRequest, "account address and pubkey address do not match") } if len(msg.Roles) == 0 { @@ -144,7 +146,7 @@ func (msg *MsgProposeAddAccount) ValidateBasic() error { err = msg.HasValidProductIDs() if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidRequest, "Invalid ProductID ranges are provided: %s", err) + return errors.Wrapf(sdkerrors.ErrInvalidRequest, "Invalid ProductID ranges are provided: %s", err) } err = validator.Validate(msg) diff --git a/x/dclauth/types/message_propose_add_account_test.go b/x/dclauth/types/message_propose_add_account_test.go index 296426ebd..d2db8d83f 100644 --- a/x/dclauth/types/message_propose_add_account_test.go +++ b/x/dclauth/types/message_propose_add_account_test.go @@ -5,6 +5,7 @@ import ( fmt "fmt" "testing" + "cosmossdk.io/errors" cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" @@ -90,7 +91,7 @@ func TestValidateMsgProposeAddAccount(t *testing.T) { valid: false, msg: NewMsgProposeAddAccountWrapper(t, testconstants.Signer, testconstants.Address1, testconstants.PubKey1, AccountRoles{}, 1, testconstants.ProductIDsEmpty), // no roles provided - err: sdkerrors.Wrapf(MissingRoles, + err: errors.Wrapf(MissingRoles, "No roles provided"), }, // zero VID with Vendor role - error - can not create Vendor with vid=0 (reserved) @@ -98,7 +99,7 @@ func TestValidateMsgProposeAddAccount(t *testing.T) { valid: false, msg: NewMsgProposeAddAccountWrapper(t, testconstants.Signer, testconstants.Address1, testconstants.PubKey1, AccountRoles{Vendor, NodeAdmin}, 0, testconstants.ProductIDsEmpty), 
- err: sdkerrors.Wrapf(MissingVendorIDForVendorAccount, + err: errors.Wrapf(MissingVendorIDForVendorAccount, "No Vendor ID is provided in the Vendor Role for the new account"), }, // negative VID - error @@ -106,7 +107,7 @@ func TestValidateMsgProposeAddAccount(t *testing.T) { valid: false, msg: NewMsgProposeAddAccountWrapper(t, testconstants.Signer, testconstants.Address1, testconstants.PubKey1, AccountRoles{Vendor, NodeAdmin}, -1, testconstants.ProductIDsEmpty), - err: sdkerrors.Wrapf(MissingVendorIDForVendorAccount, + err: errors.Wrapf(MissingVendorIDForVendorAccount, "No Vendor ID is provided in the Vendor Role for the new account"), }, // too large VID - error diff --git a/x/dclauth/types/message_propose_revoke_account.go b/x/dclauth/types/message_propose_revoke_account.go index 1e2fefa5e..c4f1673f8 100644 --- a/x/dclauth/types/message_propose_revoke_account.go +++ b/x/dclauth/types/message_propose_revoke_account.go @@ -3,6 +3,7 @@ package types import ( "time" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" @@ -46,21 +47,21 @@ func (msg *MsgProposeRevokeAccount) GetSignBytes() []byte { func (msg *MsgProposeRevokeAccount) ValidateBasic() error { if msg.Address == "" { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Account Address: it cannot be empty") + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Account Address: it cannot be empty") } _, err := sdk.AccAddressFromBech32(msg.Address) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Account Address: (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Account Address: (%s)", err) } if msg.Signer == "" { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Signer: it cannot be empty") + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Signer: it cannot be empty") } _, err = 
sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Signer: (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Signer: (%s)", err) } err = validator.Validate(msg) diff --git a/x/dclauth/types/message_reject_add_account.go b/x/dclauth/types/message_reject_add_account.go index 90035c9a3..a1874ed1c 100644 --- a/x/dclauth/types/message_reject_add_account.go +++ b/x/dclauth/types/message_reject_add_account.go @@ -3,6 +3,7 @@ package types import ( "time" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" ) @@ -46,7 +47,7 @@ func (msg *MsgRejectAddAccount) GetSignBytes() []byte { func (msg *MsgRejectAddAccount) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) } return nil diff --git a/x/dclauth/types/pending_account.pb.go b/x/dclauth/types/pending_account.pb.go index 2feda6e76..fb5cfb34e 100644 --- a/x/dclauth/types/pending_account.pb.go +++ b/x/dclauth/types/pending_account.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: dclauth/pending_account.proto +// source: zigbeealliance/distributedcomplianceledger/dclauth/pending_account.proto package types import ( fmt "fmt" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -31,7 +31,7 @@ func (m *PendingAccount) Reset() { *m = PendingAccount{} } func (m *PendingAccount) String() string { return proto.CompactTextString(m) } func (*PendingAccount) ProtoMessage() {} func (*PendingAccount) Descriptor() ([]byte, []int) { - return fileDescriptor_8e7d87157315e5f1, []int{0} + return fileDescriptor_0beda4bc7537ece0, []int{0} } func (m *PendingAccount) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -64,25 +64,27 @@ func init() { proto.RegisterType((*PendingAccount)(nil), "zigbeealliance.distributedcomplianceledger.dclauth.PendingAccount") } -func init() { proto.RegisterFile("dclauth/pending_account.proto", fileDescriptor_8e7d87157315e5f1) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/dclauth/pending_account.proto", fileDescriptor_0beda4bc7537ece0) +} -var fileDescriptor_8e7d87157315e5f1 = []byte{ - // 228 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4d, 0x49, 0xce, 0x49, - 0x2c, 0x2d, 0xc9, 0xd0, 0x2f, 0x48, 0xcd, 0x4b, 0xc9, 0xcc, 0x4b, 0x8f, 0x4f, 0x4c, 0x4e, 0xce, - 0x2f, 0xcd, 0x2b, 0xd1, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x32, 0xaa, 0xca, 0x4c, 0x4f, 0x4a, - 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4b, 0xc9, 0x2c, 0x2e, 0x29, 0xca, - 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, 0xa7, - 0x16, 0xe9, 0x41, 0x4d, 0x92, 0x12, 0x49, 0xcf, 0x4f, 0xcf, 0x07, 0x6b, 0xd7, 0x07, 0xb1, 0x20, - 0x26, 0x49, 0x89, 0xc2, 0x2c, 0x42, 0xb1, 0x40, 0x29, 0x97, 0x8b, 0x2f, 0x00, 0x62, 0xb3, 0x23, - 0x44, 0x5c, 
0x28, 0x9a, 0x8b, 0x1d, 0xaa, 0x44, 0x82, 0x51, 0x81, 0x51, 0x83, 0xdb, 0xc8, 0x5a, - 0x8f, 0x74, 0x47, 0xe8, 0x41, 0x4d, 0x73, 0x62, 0xb9, 0x70, 0x4f, 0x9e, 0x31, 0x08, 0x66, 0xa2, - 0x53, 0xd2, 0x89, 0x47, 0x72, 0x8c, 0x17, 0x1e, 0xc9, 0x31, 0x3e, 0x78, 0x24, 0xc7, 0x38, 0xe1, - 0xb1, 0x1c, 0xc3, 0x85, 0xc7, 0x72, 0x0c, 0x37, 0x1e, 0xcb, 0x31, 0x44, 0x79, 0xa4, 0x67, 0x96, - 0x64, 0x94, 0x26, 0xe9, 0x25, 0xe7, 0xe7, 0xea, 0x43, 0xec, 0xd3, 0x85, 0x59, 0xa8, 0x8f, 0x64, - 0xa1, 0x2e, 0xc2, 0x46, 0x5d, 0x88, 0x95, 0xfa, 0x15, 0xfa, 0x30, 0xaf, 0x95, 0x54, 0x16, 0xa4, - 0x16, 0x27, 0xb1, 0x81, 0x7d, 0x66, 0x0c, 0x08, 0x00, 0x00, 0xff, 0xff, 0xf9, 0x0c, 0xb3, 0x09, - 0x5b, 0x01, 0x00, 0x00, +var fileDescriptor_0beda4bc7537ece0 = []byte{ + // 231 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xf2, 0xa8, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0xa7, 0x24, 0xe7, 0x24, 0x96, 0x96, 0x64, 0xe8, 0x17, 0xa4, 0xe6, 0xa5, 0x64, + 0xe6, 0xa5, 0xc7, 0x27, 0x26, 0x27, 0xe7, 0x97, 0xe6, 0x95, 0xe8, 0x15, 0x14, 0xe5, 0x97, 0xe4, + 0x0b, 0x19, 0xa1, 0x9a, 0xa4, 0x87, 0xc7, 0x24, 0x3d, 0xa8, 0x49, 0x52, 0x22, 0xe9, 0xf9, 0xe9, + 0xf9, 0x60, 0xed, 0xfa, 0x20, 0x16, 0xc4, 0x24, 0x29, 0x07, 0x32, 0xdc, 0x84, 0xe2, 0x16, 0xa5, + 0x5c, 0x2e, 0xbe, 0x00, 0x88, 0x23, 0x1d, 0x21, 0xe2, 0x42, 0xd1, 0x5c, 0xec, 0x50, 0x25, 0x12, + 0x8c, 0x0a, 0x8c, 0x1a, 0xdc, 0x46, 0xd6, 0x7a, 0xa4, 0xbb, 0x57, 0x0f, 0x6a, 0x9a, 0x13, 0xcb, + 0x85, 0x7b, 0xf2, 0x8c, 0x41, 0x30, 0x13, 0x9d, 0x92, 0x4e, 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, + 0x8e, 0xf1, 0xc1, 0x23, 0x39, 0xc6, 0x09, 0x8f, 0xe5, 0x18, 0x2e, 0x3c, 0x96, 0x63, 0xb8, 0xf1, + 0x58, 0x8e, 0x21, 0xca, 0x23, 0x3d, 0xb3, 0x24, 0xa3, 0x34, 0x49, 0x2f, 0x39, 0x3f, 0x57, 0x1f, + 0x62, 0x9f, 0x2e, 0x36, 
0x6f, 0xe9, 0x22, 0x6c, 0xd4, 0x85, 0x7a, 0xac, 0x02, 0xee, 0xb5, 0x92, + 0xca, 0x82, 0xd4, 0xe2, 0x24, 0x36, 0xb0, 0xcf, 0x8c, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0x25, + 0xd3, 0x88, 0xfd, 0xb1, 0x01, 0x00, 0x00, } func (m *PendingAccount) Marshal() (dAtA []byte, err error) { diff --git a/x/dclauth/types/pending_account_revocation.pb.go b/x/dclauth/types/pending_account_revocation.pb.go index 67514f290..97216b7e9 100644 --- a/x/dclauth/types/pending_account_revocation.pb.go +++ b/x/dclauth/types/pending_account_revocation.pb.go @@ -1,13 +1,13 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: dclauth/pending_account_revocation.proto +// source: zigbeealliance/distributedcomplianceledger/dclauth/pending_account_revocation.proto package types import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -32,7 +32,7 @@ type PendingAccountRevocation struct { func (m *PendingAccountRevocation) Reset() { *m = PendingAccountRevocation{} } func (*PendingAccountRevocation) ProtoMessage() {} func (*PendingAccountRevocation) Descriptor() ([]byte, []int) { - return fileDescriptor_be1e09fa9f023dd9, []int{0} + return fileDescriptor_4187fdc153f14fe6, []int{0} } func (m *PendingAccountRevocation) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -80,30 +80,31 @@ func init() { } func init() { - proto.RegisterFile("dclauth/pending_account_revocation.proto", fileDescriptor_be1e09fa9f023dd9) + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/dclauth/pending_account_revocation.proto", fileDescriptor_4187fdc153f14fe6) } -var fileDescriptor_be1e09fa9f023dd9 = []byte{ - // 304 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x90, 0xb1, 0x4e, 0xeb, 0x30, - 0x14, 0x86, 0xe3, 0x7b, 0x11, 0xa8, 
0x61, 0x2b, 0x1d, 0x42, 0x07, 0x53, 0x31, 0x75, 0x89, 0x2d, - 0x95, 0x09, 0xb6, 0x76, 0x81, 0x11, 0x95, 0x01, 0x89, 0xa5, 0x72, 0x6c, 0xcb, 0xb5, 0x94, 0xfa, - 0x58, 0xb6, 0x53, 0x01, 0x4f, 0xc1, 0xc8, 0xc8, 0x0b, 0xb0, 0xf1, 0x10, 0x8c, 0x15, 0x13, 0x23, - 0x6a, 0x5e, 0x04, 0xb5, 0x4e, 0x28, 0x2b, 0x5b, 0x72, 0x7e, 0xfb, 0xff, 0x3e, 0x9f, 0x74, 0x28, - 0x78, 0xc9, 0xaa, 0x30, 0xa7, 0x56, 0x1a, 0xa1, 0x8d, 0x9a, 0x31, 0xce, 0xa1, 0x32, 0x61, 0xe6, - 0xe4, 0x12, 0x38, 0x0b, 0x1a, 0x0c, 0xb1, 0x0e, 0x02, 0x74, 0x47, 0x8f, 0x5a, 0x15, 0x52, 0xb2, - 0xb2, 0xd4, 0xcc, 0x70, 0x49, 0x84, 0xf6, 0xc1, 0xe9, 0xa2, 0x0a, 0x52, 0x70, 0x58, 0xd8, 0x38, - 0x2d, 0xa5, 0x50, 0xd2, 0x91, 0xa6, 0xb4, 0xdf, 0x53, 0xa0, 0x60, 0x7b, 0x9d, 0x6e, 0xbe, 0x62, - 0x53, 0xff, 0x98, 0x83, 0x5f, 0x80, 0x9f, 0xc5, 0x20, 0xfe, 0x34, 0xd1, 0x51, 0xab, 0xa3, 0x1c, - 0x33, 0x21, 0x0e, 0x4f, 0x5f, 0x51, 0x9a, 0x5d, 0x47, 0xbd, 0x71, 0xb4, 0x9b, 0xfe, 0xc8, 0x75, - 0x47, 0xe9, 0x01, 0x13, 0xc2, 0x49, 0xef, 0x33, 0x34, 0x40, 0xc3, 0xce, 0x24, 0xfb, 0x78, 0xcb, - 0x7b, 0x4d, 0xe9, 0x38, 0x26, 0x37, 0xc1, 0x69, 0xa3, 0xa6, 0xed, 0xc1, 0xee, 0x6d, 0xda, 0x61, - 0xd6, 0x3a, 0x58, 0xb2, 0xd2, 0x67, 0xff, 0x06, 0xff, 0x87, 0x87, 0xa3, 0x73, 0xf2, 0xf7, 0xe7, - 0x91, 0xcb, 0x8d, 0xe4, 0x74, 0xd7, 0x75, 0xb1, 0xf7, 0xfc, 0x72, 0x92, 0x4c, 0x8a, 0xf7, 0x35, - 0x46, 0xab, 0x35, 0x46, 0x5f, 0x6b, 0x8c, 0x9e, 0x6a, 0x9c, 0xac, 0x6a, 0x9c, 0x7c, 0xd6, 0x38, - 0xb9, 0xbb, 0x52, 0x3a, 0xcc, 0xab, 0x82, 0x70, 0x58, 0xd0, 0xc8, 0xcb, 0x5b, 0x20, 0xfd, 0x05, - 0xcc, 0x77, 0xc4, 0x3c, 0x22, 0xe9, 0x3d, 0x6d, 0x37, 0x13, 0x1e, 0xac, 0xf4, 0xc5, 0xfe, 0x76, - 0x35, 0x67, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x16, 0x7c, 0x38, 0xf8, 0xc0, 0x01, 0x00, 0x00, +var fileDescriptor_4187fdc153f14fe6 = []byte{ + // 311 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x0a, 0xae, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 
0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0xa7, 0x24, 0xe7, 0x24, 0x96, 0x96, 0x64, 0xe8, 0x17, 0xa4, 0xe6, 0xa5, 0x64, + 0xe6, 0xa5, 0xc7, 0x27, 0x26, 0x27, 0xe7, 0x97, 0xe6, 0x95, 0xc4, 0x17, 0xa5, 0x96, 0xe5, 0x27, + 0x27, 0x96, 0x64, 0xe6, 0xe7, 0xe9, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0x19, 0xa1, 0x1a, 0xaa, + 0x87, 0xc7, 0x50, 0x3d, 0xa8, 0xa1, 0x52, 0x22, 0xe9, 0xf9, 0xe9, 0xf9, 0x60, 0xed, 0xfa, 0x20, + 0x16, 0xc4, 0x24, 0x29, 0xc9, 0xe4, 0xfc, 0xe2, 0xdc, 0xfc, 0xe2, 0x78, 0x88, 0x04, 0x84, 0x03, + 0x95, 0xb2, 0x23, 0xc3, 0xe5, 0xe9, 0x45, 0x89, 0x79, 0x25, 0x10, 0xfd, 0x4a, 0x6b, 0x19, 0xb9, + 0x24, 0x02, 0x20, 0x3e, 0x71, 0x84, 0x78, 0x24, 0x08, 0xee, 0x0f, 0x21, 0x23, 0x2e, 0xf6, 0xc4, + 0x94, 0x94, 0xa2, 0xd4, 0xe2, 0x62, 0x09, 0x46, 0x05, 0x46, 0x0d, 0x4e, 0x27, 0x89, 0x4b, 0x5b, + 0x74, 0x45, 0xa0, 0xf6, 0x3b, 0x42, 0x64, 0x82, 0x4b, 0x8a, 0x32, 0xf3, 0xd2, 0x83, 0x60, 0x0a, + 0x85, 0xc2, 0xb9, 0x38, 0x13, 0x0b, 0x0a, 0x8a, 0xf2, 0xcb, 0x12, 0x73, 0x8a, 0x25, 0x98, 0x14, + 0x98, 0x35, 0xb8, 0x8d, 0x2c, 0xf5, 0x48, 0x0f, 0x09, 0x3d, 0x77, 0x90, 0x23, 0x83, 0x10, 0x66, + 0x59, 0xb1, 0xcc, 0x58, 0x20, 0xcf, 0xe0, 0x94, 0x74, 0xe2, 0x91, 0x1c, 0xe3, 0x85, 0x47, 0x72, + 0x8c, 0x0f, 0x1e, 0xc9, 0x31, 0x4e, 0x78, 0x2c, 0xc7, 0x70, 0xe1, 0xb1, 0x1c, 0xc3, 0x8d, 0xc7, + 0x72, 0x0c, 0x51, 0x1e, 0xe9, 0x99, 0x25, 0x19, 0xa5, 0x49, 0x7a, 0xc9, 0xf9, 0xb9, 0xfa, 0x10, + 0xfb, 0x74, 0xb1, 0x85, 0x8a, 0x2e, 0xc2, 0x46, 0x5d, 0x68, 0xb8, 0x54, 0xc0, 0x43, 0xa6, 0xa4, + 0xb2, 0x20, 0xb5, 0x38, 0x89, 0x0d, 0x1c, 0x34, 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0x22, + 0x02, 0xac, 0x8a, 0x16, 0x02, 0x00, 0x00, } func (m *PendingAccountRevocation) Marshal() (dAtA []byte, err error) { diff --git a/x/dclauth/types/query.pb.go b/x/dclauth/types/query.pb.go index 5bc9474cb..cec439b32 100644 --- a/x/dclauth/types/query.pb.go +++ b/x/dclauth/types/query.pb.go @@ -1,5 +1,5 @@ // Code 
generated by protoc-gen-gogo. DO NOT EDIT. -// source: dclauth/query.proto +// source: zigbeealliance/distributedcomplianceledger/dclauth/query.proto package types @@ -8,9 +8,9 @@ import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" query "github.com/cosmos/cosmos-sdk/types/query" - _ "github.com/gogo/protobuf/gogoproto" - grpc1 "github.com/gogo/protobuf/grpc" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + grpc1 "github.com/cosmos/gogoproto/grpc" + proto "github.com/cosmos/gogoproto/proto" _ "google.golang.org/genproto/googleapis/api/annotations" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" @@ -39,7 +39,7 @@ func (m *QueryGetAccountRequest) Reset() { *m = QueryGetAccountRequest{} func (m *QueryGetAccountRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetAccountRequest) ProtoMessage() {} func (*QueryGetAccountRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_ea8dcf317d352d49, []int{0} + return fileDescriptor_4b6c889ea675319a, []int{0} } func (m *QueryGetAccountRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -83,7 +83,7 @@ func (m *QueryGetAccountResponse) Reset() { *m = QueryGetAccountResponse func (m *QueryGetAccountResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetAccountResponse) ProtoMessage() {} func (*QueryGetAccountResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_ea8dcf317d352d49, []int{1} + return fileDescriptor_4b6c889ea675319a, []int{1} } func (m *QueryGetAccountResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -127,7 +127,7 @@ func (m *QueryAllAccountRequest) Reset() { *m = QueryAllAccountRequest{} func (m *QueryAllAccountRequest) String() string { return proto.CompactTextString(m) } func (*QueryAllAccountRequest) ProtoMessage() {} func (*QueryAllAccountRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_ea8dcf317d352d49, []int{2} + return 
fileDescriptor_4b6c889ea675319a, []int{2} } func (m *QueryAllAccountRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -172,7 +172,7 @@ func (m *QueryAllAccountResponse) Reset() { *m = QueryAllAccountResponse func (m *QueryAllAccountResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllAccountResponse) ProtoMessage() {} func (*QueryAllAccountResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_ea8dcf317d352d49, []int{3} + return fileDescriptor_4b6c889ea675319a, []int{3} } func (m *QueryAllAccountResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -223,7 +223,7 @@ func (m *QueryGetPendingAccountRequest) Reset() { *m = QueryGetPendingAc func (m *QueryGetPendingAccountRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetPendingAccountRequest) ProtoMessage() {} func (*QueryGetPendingAccountRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_ea8dcf317d352d49, []int{4} + return fileDescriptor_4b6c889ea675319a, []int{4} } func (m *QueryGetPendingAccountRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -267,7 +267,7 @@ func (m *QueryGetPendingAccountResponse) Reset() { *m = QueryGetPendingA func (m *QueryGetPendingAccountResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetPendingAccountResponse) ProtoMessage() {} func (*QueryGetPendingAccountResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_ea8dcf317d352d49, []int{5} + return fileDescriptor_4b6c889ea675319a, []int{5} } func (m *QueryGetPendingAccountResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -311,7 +311,7 @@ func (m *QueryAllPendingAccountRequest) Reset() { *m = QueryAllPendingAc func (m *QueryAllPendingAccountRequest) String() string { return proto.CompactTextString(m) } func (*QueryAllPendingAccountRequest) ProtoMessage() {} func (*QueryAllPendingAccountRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_ea8dcf317d352d49, 
[]int{6} + return fileDescriptor_4b6c889ea675319a, []int{6} } func (m *QueryAllPendingAccountRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -356,7 +356,7 @@ func (m *QueryAllPendingAccountResponse) Reset() { *m = QueryAllPendingA func (m *QueryAllPendingAccountResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllPendingAccountResponse) ProtoMessage() {} func (*QueryAllPendingAccountResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_ea8dcf317d352d49, []int{7} + return fileDescriptor_4b6c889ea675319a, []int{7} } func (m *QueryAllPendingAccountResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -409,7 +409,7 @@ func (m *QueryGetPendingAccountRevocationRequest) Reset() { func (m *QueryGetPendingAccountRevocationRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetPendingAccountRevocationRequest) ProtoMessage() {} func (*QueryGetPendingAccountRevocationRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_ea8dcf317d352d49, []int{8} + return fileDescriptor_4b6c889ea675319a, []int{8} } func (m *QueryGetPendingAccountRevocationRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -455,7 +455,7 @@ func (m *QueryGetPendingAccountRevocationResponse) Reset() { func (m *QueryGetPendingAccountRevocationResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetPendingAccountRevocationResponse) ProtoMessage() {} func (*QueryGetPendingAccountRevocationResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_ea8dcf317d352d49, []int{9} + return fileDescriptor_4b6c889ea675319a, []int{9} } func (m *QueryGetPendingAccountRevocationResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -501,7 +501,7 @@ func (m *QueryAllPendingAccountRevocationRequest) Reset() { func (m *QueryAllPendingAccountRevocationRequest) String() string { return proto.CompactTextString(m) } func (*QueryAllPendingAccountRevocationRequest) ProtoMessage() {} 
func (*QueryAllPendingAccountRevocationRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_ea8dcf317d352d49, []int{10} + return fileDescriptor_4b6c889ea675319a, []int{10} } func (m *QueryAllPendingAccountRevocationRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -548,7 +548,7 @@ func (m *QueryAllPendingAccountRevocationResponse) Reset() { func (m *QueryAllPendingAccountRevocationResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllPendingAccountRevocationResponse) ProtoMessage() {} func (*QueryAllPendingAccountRevocationResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_ea8dcf317d352d49, []int{11} + return fileDescriptor_4b6c889ea675319a, []int{11} } func (m *QueryAllPendingAccountRevocationResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -598,7 +598,7 @@ func (m *QueryGetAccountStatRequest) Reset() { *m = QueryGetAccountStatR func (m *QueryGetAccountStatRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetAccountStatRequest) ProtoMessage() {} func (*QueryGetAccountStatRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_ea8dcf317d352d49, []int{12} + return fileDescriptor_4b6c889ea675319a, []int{12} } func (m *QueryGetAccountStatRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -635,7 +635,7 @@ func (m *QueryGetAccountStatResponse) Reset() { *m = QueryGetAccountStat func (m *QueryGetAccountStatResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetAccountStatResponse) ProtoMessage() {} func (*QueryGetAccountStatResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_ea8dcf317d352d49, []int{13} + return fileDescriptor_4b6c889ea675319a, []int{13} } func (m *QueryGetAccountStatResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -679,7 +679,7 @@ func (m *QueryGetRevokedAccountRequest) Reset() { *m = QueryGetRevokedAc func (m *QueryGetRevokedAccountRequest) String() string { return 
proto.CompactTextString(m) } func (*QueryGetRevokedAccountRequest) ProtoMessage() {} func (*QueryGetRevokedAccountRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_ea8dcf317d352d49, []int{14} + return fileDescriptor_4b6c889ea675319a, []int{14} } func (m *QueryGetRevokedAccountRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -723,7 +723,7 @@ func (m *QueryGetRevokedAccountResponse) Reset() { *m = QueryGetRevokedA func (m *QueryGetRevokedAccountResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetRevokedAccountResponse) ProtoMessage() {} func (*QueryGetRevokedAccountResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_ea8dcf317d352d49, []int{15} + return fileDescriptor_4b6c889ea675319a, []int{15} } func (m *QueryGetRevokedAccountResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -767,7 +767,7 @@ func (m *QueryAllRevokedAccountRequest) Reset() { *m = QueryAllRevokedAc func (m *QueryAllRevokedAccountRequest) String() string { return proto.CompactTextString(m) } func (*QueryAllRevokedAccountRequest) ProtoMessage() {} func (*QueryAllRevokedAccountRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_ea8dcf317d352d49, []int{16} + return fileDescriptor_4b6c889ea675319a, []int{16} } func (m *QueryAllRevokedAccountRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -812,7 +812,7 @@ func (m *QueryAllRevokedAccountResponse) Reset() { *m = QueryAllRevokedA func (m *QueryAllRevokedAccountResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllRevokedAccountResponse) ProtoMessage() {} func (*QueryAllRevokedAccountResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_ea8dcf317d352d49, []int{17} + return fileDescriptor_4b6c889ea675319a, []int{17} } func (m *QueryAllRevokedAccountResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -863,7 +863,7 @@ func (m *QueryGetRejectedAccountRequest) Reset() { *m = QueryGetRejected func (m 
*QueryGetRejectedAccountRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetRejectedAccountRequest) ProtoMessage() {} func (*QueryGetRejectedAccountRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_ea8dcf317d352d49, []int{18} + return fileDescriptor_4b6c889ea675319a, []int{18} } func (m *QueryGetRejectedAccountRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -907,7 +907,7 @@ func (m *QueryGetRejectedAccountResponse) Reset() { *m = QueryGetRejecte func (m *QueryGetRejectedAccountResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetRejectedAccountResponse) ProtoMessage() {} func (*QueryGetRejectedAccountResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_ea8dcf317d352d49, []int{19} + return fileDescriptor_4b6c889ea675319a, []int{19} } func (m *QueryGetRejectedAccountResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -951,7 +951,7 @@ func (m *QueryAllRejectedAccountRequest) Reset() { *m = QueryAllRejected func (m *QueryAllRejectedAccountRequest) String() string { return proto.CompactTextString(m) } func (*QueryAllRejectedAccountRequest) ProtoMessage() {} func (*QueryAllRejectedAccountRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_ea8dcf317d352d49, []int{20} + return fileDescriptor_4b6c889ea675319a, []int{20} } func (m *QueryAllRejectedAccountRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -996,7 +996,7 @@ func (m *QueryAllRejectedAccountResponse) Reset() { *m = QueryAllRejecte func (m *QueryAllRejectedAccountResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllRejectedAccountResponse) ProtoMessage() {} func (*QueryAllRejectedAccountResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_ea8dcf317d352d49, []int{21} + return fileDescriptor_4b6c889ea675319a, []int{21} } func (m *QueryAllRejectedAccountResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1064,77 +1064,79 @@ func 
init() { proto.RegisterType((*QueryAllRejectedAccountResponse)(nil), "zigbeealliance.distributedcomplianceledger.dclauth.QueryAllRejectedAccountResponse") } -func init() { proto.RegisterFile("dclauth/query.proto", fileDescriptor_ea8dcf317d352d49) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/dclauth/query.proto", fileDescriptor_4b6c889ea675319a) +} -var fileDescriptor_ea8dcf317d352d49 = []byte{ - // 1059 bytes of a gzipped FileDescriptorProto +var fileDescriptor_4b6c889ea675319a = []byte{ + // 1068 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x98, 0xcf, 0x6f, 0xdc, 0x44, - 0x14, 0xc7, 0x33, 0xbb, 0x40, 0xc4, 0x54, 0x4a, 0xd5, 0x69, 0xa0, 0x5b, 0x27, 0x71, 0x90, 0x29, - 0x49, 0x84, 0x58, 0x9b, 0x2e, 0x17, 0x7e, 0x1c, 0xd0, 0x06, 0x89, 0x22, 0xa8, 0xa0, 0xd9, 0xdc, - 0x08, 0x28, 0x78, 0xed, 0x91, 0x63, 0x70, 0x3c, 0x8e, 0x3d, 0x1b, 0x51, 0x10, 0x17, 0x84, 0x10, + 0x14, 0xc7, 0x33, 0xbb, 0x40, 0xc4, 0x54, 0x4a, 0xd5, 0x21, 0xa2, 0x5b, 0x27, 0xdd, 0x20, 0x53, + 0x92, 0x08, 0xb1, 0x36, 0x5d, 0x2e, 0xfc, 0x90, 0x80, 0x0d, 0x82, 0x16, 0xa8, 0xa0, 0xd9, 0xbd, + 0x11, 0x50, 0xf0, 0xda, 0x23, 0xc7, 0xe0, 0x78, 0x1c, 0xdb, 0x1b, 0x51, 0x10, 0x17, 0x84, 0x10, 0x27, 0x40, 0x20, 0xf1, 0x97, 0x70, 0xe3, 0x80, 0xb8, 0xe5, 0x46, 0x05, 0x17, 0x24, 0xa4, 0x16, - 0x25, 0x80, 0x04, 0x7f, 0x05, 0xb2, 0xe7, 0x59, 0xfe, 0xb1, 0xe3, 0x34, 0x8d, 0x67, 0xb9, 0x6d, - 0x3c, 0x9e, 0xef, 0x9b, 0xcf, 0xf7, 0xbd, 0x7d, 0xf3, 0xb2, 0xf8, 0xb2, 0xeb, 0x04, 0xf6, 0x84, - 0xef, 0x59, 0x07, 0x13, 0x1a, 0xdf, 0x36, 0xa3, 0x98, 0x71, 0x46, 0x06, 0x1f, 0xf9, 0xde, 0x98, - 0x52, 0x3b, 0x08, 0x7c, 0x3b, 0x74, 0xa8, 0xe9, 0xfa, 0x09, 0x8f, 0xfd, 0xf1, 0x84, 0x53, 0xd7, - 0x61, 0xfb, 0x91, 0x78, 0x1a, 0x50, 0xd7, 0xa3, 0xb1, 0x09, 0xfb, 0xb5, 0x65, 0x8f, 0x31, 0x2f, - 0xa0, 0x96, 0x1d, 0xf9, 0x96, 0x1d, 0x86, 0x8c, 0xdb, 0xdc, 0x67, 0x61, 0x22, 0x14, 0xb5, 0xa7, - 0x1d, 0x96, 0xec, 0xb3, 0xc4, 0x1a, 0xdb, 0x09, 
0x15, 0xa1, 0xac, 0xc3, 0xeb, 0x63, 0xca, 0xed, - 0xeb, 0x56, 0x64, 0x7b, 0x7e, 0x98, 0xbd, 0x0c, 0xef, 0x3e, 0x96, 0x1f, 0xc9, 0x76, 0x1c, 0x36, - 0x09, 0x39, 0x3c, 0x5e, 0xc9, 0x1f, 0x47, 0x34, 0x74, 0xfd, 0xd0, 0xdb, 0xad, 0x2e, 0x6f, 0x34, - 0x2c, 0xef, 0xc6, 0xf4, 0x90, 0x39, 0x65, 0x7d, 0xad, 0xa6, 0xbf, 0x9b, 0x70, 0x7b, 0x2a, 0x48, - 0xba, 0xeb, 0x03, 0xea, 0xd6, 0x82, 0xe8, 0xc5, 0xf2, 0xfb, 0xd4, 0xe1, 0x53, 0xeb, 0x8b, 0x1e, - 0xf3, 0x58, 0xf6, 0xd1, 0x4a, 0x3f, 0xc1, 0xd3, 0xab, 0x02, 0x7e, 0x57, 0x2c, 0x88, 0x3f, 0xc4, - 0x92, 0x71, 0x13, 0x3f, 0xbe, 0x95, 0xba, 0x71, 0x83, 0xf2, 0xa1, 0x50, 0x1a, 0xd1, 0x83, 0x09, - 0x4d, 0x38, 0x19, 0xe0, 0x79, 0xdb, 0x75, 0x63, 0x9a, 0x24, 0x3d, 0xf4, 0x04, 0xda, 0x78, 0x74, - 0xb3, 0xf7, 0xcb, 0xf7, 0xfd, 0x45, 0xd8, 0x3c, 0x14, 0x2b, 0xdb, 0x3c, 0xf6, 0x43, 0x6f, 0x94, - 0xbf, 0x68, 0x1c, 0xe2, 0x2b, 0x53, 0x6a, 0x49, 0xc4, 0xc2, 0x84, 0x92, 0x1d, 0x3c, 0x0f, 0x47, - 0xcd, 0xe4, 0x2e, 0x0c, 0x5e, 0x32, 0x1f, 0x3c, 0xc9, 0x26, 0xa8, 0x6e, 0x3e, 0x74, 0x74, 0x77, - 0x75, 0x6e, 0x94, 0x2b, 0x1a, 0xef, 0x01, 0xc5, 0x30, 0x08, 0x6a, 0x14, 0xaf, 0x62, 0x5c, 0xe4, - 0x17, 0x22, 0xaf, 0x99, 0x40, 0x91, 0x16, 0x83, 0x29, 0xea, 0x0e, 0x8a, 0xc1, 0xbc, 0x65, 0x7b, - 0x14, 0xf6, 0x8e, 0x4a, 0x3b, 0x8d, 0x1f, 0x11, 0xa0, 0x95, 0x43, 0xc8, 0xd0, 0xba, 0x6a, 0xd1, - 0xc8, 0x8d, 0x0a, 0x40, 0x27, 0x03, 0x58, 0xbf, 0x2f, 0x80, 0x38, 0x59, 0x85, 0x60, 0x1b, 0xaf, - 0xe4, 0xb9, 0xb9, 0x25, 0x2a, 0x54, 0x41, 0xc2, 0xbf, 0x41, 0x58, 0x6f, 0x52, 0x05, 0x77, 0x22, - 0xbc, 0x10, 0x55, 0x56, 0x20, 0x0b, 0x9b, 0xe7, 0x31, 0xa9, 0x1a, 0x03, 0xbc, 0xaa, 0xe9, 0x1b, - 0x1e, 0x90, 0x0e, 0x83, 0x40, 0x4e, 0xaa, 0xaa, 0x28, 0x7e, 0xcf, 0xe9, 0x25, 0x91, 0x4e, 0xa1, - 0xef, 0xce, 0x92, 0x5e, 0x5d, 0xc1, 0xbc, 0x8b, 0xd7, 0x9b, 0x52, 0x9b, 0x37, 0xb4, 0x36, 0xa5, - 0xf3, 0x03, 0xc2, 0x1b, 0xf7, 0xd7, 0x07, 0x1b, 0xbf, 0x44, 0xb8, 0x17, 0x35, 0xbc, 0x04, 0x09, - 0xbc, 0xd9, 0xde, 0xd1, 0x42, 0x13, 0xbc, 0x6d, 0x8c, 0x69, 0x1c, 0x80, 0x39, 0x92, 
0xcc, 0xd7, - 0xcd, 0x51, 0x55, 0x6d, 0x9f, 0x75, 0xc0, 0xb0, 0x53, 0x63, 0x9e, 0xcd, 0xb0, 0xee, 0xff, 0x6d, - 0x98, 0xba, 0xb2, 0x5c, 0xc6, 0x5a, 0xed, 0x8e, 0xd9, 0xe6, 0x76, 0xfe, 0xd5, 0x36, 0x3e, 0x47, - 0x78, 0x49, 0xba, 0x0c, 0xbe, 0x78, 0xf8, 0x42, 0xe9, 0x31, 0x64, 0xe3, 0xe5, 0x16, 0xfd, 0x3a, - 0x95, 0x01, 0xf8, 0xb2, 0xb2, 0xf1, 0x42, 0xd1, 0x6e, 0x47, 0xe2, 0x2a, 0xaf, 0x35, 0xa1, 0x5e, - 0xed, 0x3b, 0x23, 0x6f, 0xaa, 0xf5, 0xbd, 0x45, 0x5b, 0x89, 0x2b, 0x2b, 0x6d, 0x9a, 0x6a, 0x35, - 0x46, 0xde, 0x56, 0xaa, 0xfa, 0xe5, 0xa6, 0x2a, 0xe7, 0x99, 0x45, 0x53, 0x7d, 0x00, 0xfa, 0xee, - 0x2c, 0xe9, 0xd5, 0x55, 0xef, 0x8b, 0xe5, 0xd4, 0x8a, 0x11, 0xee, 0xcc, 0x75, 0xf1, 0x1d, 0xc2, - 0xab, 0x8d, 0x9b, 0xc1, 0x9a, 0x04, 0x5f, 0x8c, 0xab, 0x4b, 0x90, 0x8a, 0x57, 0xce, 0xe7, 0x4d, - 0x45, 0x0a, 0xcc, 0xa9, 0x47, 0x30, 0xf6, 0xca, 0x19, 0x93, 0x42, 0xa9, 0x2a, 0x8e, 0x7b, 0xb9, - 0x05, 0xb2, 0x50, 0xa7, 0x59, 0xd0, 0x9d, 0xad, 0x05, 0xca, 0x0a, 0x64, 0xf0, 0xef, 0x65, 0xfc, - 0x70, 0x46, 0x48, 0x8e, 0x10, 0x9e, 0xcf, 0xe5, 0x5f, 0x3f, 0xcf, 0xd1, 0xe5, 0x83, 0xbd, 0xf6, - 0x86, 0x12, 0x2d, 0x71, 0x74, 0xe3, 0xda, 0xa7, 0xbf, 0xfe, 0xf9, 0x6d, 0x47, 0x27, 0xcb, 0x96, - 0xeb, 0x04, 0x56, 0xf9, 0xbf, 0x9a, 0xc4, 0xfa, 0x18, 0x0a, 0xf7, 0x13, 0xf2, 0x13, 0xc2, 0x18, - 0x76, 0x0e, 0x83, 0xa0, 0x05, 0xcd, 0xd4, 0x80, 0xdf, 0x82, 0x66, 0x7a, 0x92, 0x37, 0xb4, 0x8c, - 0x66, 0x91, 0x90, 0x69, 0x1a, 0xf2, 0x37, 0xc2, 0x0b, 0xd5, 0xdb, 0x8f, 0x6c, 0xb5, 0x71, 0x52, - 0x3a, 0x9a, 0x6a, 0x23, 0x95, 0x92, 0x40, 0xd5, 0xcf, 0xa8, 0xd6, 0xc9, 0x53, 0x05, 0x55, 0x14, - 0xb3, 0x88, 0x25, 0xd4, 0xed, 0x4b, 0x92, 0x75, 0x0f, 0xe1, 0x4b, 0x55, 0xa5, 0x34, 0x67, 0x5b, - 0x6d, 0x7c, 0x56, 0xcd, 0xda, 0x38, 0x6f, 0x1b, 0x4f, 0x66, 0xac, 0x2b, 0x64, 0xe9, 0x14, 0x56, - 0xf2, 0x55, 0x07, 0xf7, 0x9a, 0x06, 0x19, 0xb2, 0xa3, 0x32, 0x03, 0xb5, 0x59, 0x50, 0x7b, 0x67, - 0x36, 0xe2, 0x00, 0xff, 0x7c, 0x06, 0x3f, 0x20, 0xcf, 0x4a, 0xe0, 0x8b, 0x5f, 0x21, 0x64, 0x39, - 0xff, 0xa2, 0x83, 0x97, 
0x9a, 0xe4, 0xd3, 0xec, 0xef, 0xa8, 0x4c, 0x95, 0x3a, 0x53, 0xce, 0x30, - 0x09, 0x1b, 0x66, 0x66, 0xca, 0x06, 0x59, 0x3b, 0x9b, 0x29, 0xe4, 0x67, 0x54, 0x19, 0x11, 0xc9, - 0x9b, 0x0a, 0xda, 0x65, 0x69, 0x42, 0xd5, 0xde, 0x52, 0xa6, 0x07, 0x80, 0xab, 0x19, 0xe0, 0x55, - 0x72, 0x45, 0xd2, 0x82, 0x93, 0x94, 0xe0, 0x2f, 0x84, 0x17, 0xaa, 0x53, 0x4e, 0xbb, 0xce, 0x25, - 0x9d, 0xff, 0xda, 0x75, 0x2e, 0xf9, 0xa0, 0x67, 0x3c, 0x93, 0xa1, 0xad, 0x91, 0x6b, 0x05, 0x1a, - 0x0c, 0x66, 0xb2, 0x22, 0xbe, 0x8b, 0xf0, 0xa5, 0xaa, 0x50, 0xeb, 0xc6, 0xa5, 0x1a, 0xb5, 0x71, - 0xa6, 0x35, 0x8c, 0x0c, 0x75, 0x99, 0x68, 0xcd, 0xa8, 0xe4, 0x1f, 0x84, 0x2f, 0xd6, 0xe6, 0x11, - 0xd2, 0xd2, 0x76, 0xd9, 0xb4, 0xa6, 0x6d, 0x2b, 0xd5, 0x6c, 0xbe, 0x85, 0xf2, 0x21, 0x4a, 0x96, - 0xcc, 0x63, 0x84, 0x49, 0x4d, 0x2a, 0xcd, 0x66, 0x4b, 0xeb, 0x15, 0xe3, 0x36, 0x4f, 0xa1, 0xb2, - 0x8b, 0x68, 0x0a, 0x77, 0x73, 0x7c, 0x74, 0xac, 0xa3, 0x3b, 0xc7, 0x3a, 0xfa, 0xe3, 0x58, 0x47, - 0x5f, 0x9f, 0xe8, 0x73, 0x77, 0x4e, 0xf4, 0xb9, 0xdf, 0x4e, 0xf4, 0xb9, 0xb7, 0x5f, 0xf3, 0x7c, - 0xbe, 0x37, 0x19, 0x9b, 0x0e, 0xdb, 0xb7, 0xc4, 0xe9, 0xfa, 0xf9, 0xf1, 0xac, 0xd2, 0xf1, 0xfa, - 0xc5, 0xf9, 0xfa, 0xe2, 0x80, 0xd6, 0x87, 0x56, 0xfe, 0x1b, 0x31, 0xbf, 0x1d, 0xd1, 0x64, 0xfc, - 0x48, 0xf6, 0x43, 0xef, 0x73, 0xff, 0x05, 0x00, 0x00, 0xff, 0xff, 0xd5, 0x00, 0x82, 0x90, 0x69, - 0x17, 0x00, 0x00, + 0x25, 0x80, 0x04, 0x7f, 0x05, 0xb2, 0xe7, 0x99, 0xf5, 0x78, 0xc7, 0xdb, 0x64, 0x3d, 0xcb, 0x2d, + 0xf1, 0x78, 0xbe, 0x33, 0x9f, 0xef, 0x7b, 0xf3, 0xfc, 0x76, 0xf0, 0xf3, 0x1f, 0x7a, 0xee, 0x90, + 0x52, 0xcb, 0xf7, 0x3d, 0x2b, 0xb0, 0xa9, 0xe9, 0x78, 0x71, 0x12, 0x79, 0xc3, 0x51, 0x42, 0x1d, + 0x9b, 0xed, 0x87, 0xfc, 0xa9, 0x4f, 0x1d, 0x97, 0x46, 0xa6, 0x63, 0xfb, 0xd6, 0x28, 0xd9, 0x33, + 0x0f, 0x46, 0x34, 0xba, 0x65, 0x84, 0x11, 0x4b, 0x18, 0xe9, 0x8a, 0xf3, 0x8d, 0x29, 0xf3, 0x0d, + 0x98, 0xaf, 0xad, 0xba, 0x8c, 0xb9, 0x3e, 0x35, 0xad, 0xd0, 0x33, 0xad, 0x20, 0x60, 0x89, 0x95, + 0x78, 0x2c, 0x88, 0xb9, 0xa2, 0xf6, 0xb8, 
0xcd, 0xe2, 0x7d, 0x16, 0x9b, 0x43, 0x2b, 0xa6, 0x7c, + 0x29, 0xf3, 0xf0, 0xea, 0x90, 0x26, 0xd6, 0x55, 0x33, 0xb4, 0x5c, 0x2f, 0xc8, 0x5e, 0x86, 0x77, + 0x5f, 0x9c, 0x61, 0xf7, 0x96, 0x6d, 0xb3, 0x51, 0x90, 0x80, 0xc2, 0xf5, 0x19, 0x14, 0x42, 0x1a, + 0x38, 0x5e, 0xe0, 0xee, 0x8a, 0x4a, 0x83, 0xfa, 0x4a, 0xbb, 0x11, 0x3d, 0x64, 0x76, 0x11, 0xf0, + 0xe5, 0xd9, 0x01, 0x77, 0xe3, 0xc4, 0xaa, 0x43, 0x99, 0xee, 0xe5, 0x7d, 0xea, 0x94, 0x28, 0x5f, + 0x9d, 0x49, 0xe9, 0x3d, 0x6a, 0x27, 0x13, 0x52, 0xcb, 0x2e, 0x73, 0x59, 0xf6, 0xa7, 0x99, 0xfe, + 0x05, 0x4f, 0x2f, 0xf1, 0xf0, 0xef, 0xf2, 0x01, 0xfe, 0x0f, 0x1f, 0xd2, 0x6f, 0xe0, 0x87, 0xb7, + 0xd3, 0x7c, 0xb8, 0x46, 0x93, 0x1e, 0x57, 0xea, 0xd3, 0x83, 0x11, 0x8d, 0x13, 0xd2, 0xc5, 0x8b, + 0x96, 0xe3, 0x44, 0x34, 0x8e, 0x5b, 0xe8, 0x11, 0xb4, 0xf9, 0xe0, 0x56, 0xeb, 0xe7, 0xef, 0x3a, + 0xcb, 0x30, 0xb9, 0xc7, 0x47, 0x06, 0x49, 0xe4, 0x05, 0x6e, 0x3f, 0x7f, 0x51, 0x3f, 0xc4, 0x17, + 0x27, 0xd4, 0xe2, 0x90, 0x05, 0x31, 0x25, 0x3b, 0x78, 0x11, 0xb6, 0x9a, 0xc9, 0x9d, 0xeb, 0x3e, + 0x67, 0x9c, 0x3d, 0xcd, 0x0d, 0x50, 0xdd, 0xba, 0xef, 0xe8, 0xce, 0xda, 0x42, 0x3f, 0x57, 0xd4, + 0xdf, 0x05, 0x8a, 0x9e, 0xef, 0x97, 0x28, 0x5e, 0xc1, 0x78, 0x9c, 0xe1, 0xb0, 0xf2, 0xba, 0x01, + 0x14, 0xe9, 0x71, 0x30, 0xf8, 0xc9, 0x83, 0xe3, 0x60, 0xdc, 0xb4, 0x5c, 0x0a, 0x73, 0xfb, 0x85, + 0x99, 0xfa, 0x0f, 0x08, 0xd0, 0x8a, 0x4b, 0xc8, 0xd0, 0x9a, 0x6a, 0xd1, 0xc8, 0x35, 0x01, 0xa0, + 0x91, 0x01, 0x6c, 0xdc, 0x13, 0x80, 0xef, 0x4c, 0x20, 0x18, 0xe0, 0xcb, 0x79, 0x6c, 0x6e, 0xf2, + 0x23, 0xa2, 0x20, 0xe0, 0x5f, 0x23, 0xdc, 0xae, 0x52, 0x05, 0x77, 0x42, 0xbc, 0x14, 0x0a, 0x23, + 0x10, 0x85, 0xad, 0x59, 0x4c, 0x12, 0xd7, 0x00, 0xaf, 0x4a, 0xfa, 0xba, 0x0b, 0xa4, 0x3d, 0xdf, + 0x97, 0x93, 0xaa, 0x4a, 0x8a, 0xdf, 0x72, 0x7a, 0xc9, 0x4a, 0x53, 0xe8, 0x9b, 0xf3, 0xa4, 0x57, + 0x97, 0x30, 0xef, 0xe0, 0x8d, 0xaa, 0xd0, 0xe6, 0x15, 0xb5, 0x4e, 0xea, 0x7c, 0x8f, 0xf0, 0xe6, + 0xbd, 0xf5, 0xc1, 0xc6, 0x2f, 0x10, 0x6e, 0x85, 0x15, 0x2f, 0x41, 0x00, 0x6f, 
0xd4, 0x77, 0x74, + 0xac, 0x09, 0xde, 0x56, 0xae, 0xa9, 0x1f, 0x80, 0x39, 0x92, 0xc8, 0x97, 0xcd, 0x51, 0x95, 0x6d, + 0x9f, 0x36, 0xc0, 0xb0, 0xa9, 0x6b, 0x9e, 0xce, 0xb0, 0xe6, 0xff, 0x6d, 0x98, 0xba, 0xb4, 0x5c, + 0xc5, 0x5a, 0xe9, 0x1b, 0x33, 0x48, 0xac, 0xfc, 0x68, 0xeb, 0x9f, 0x21, 0xbc, 0x22, 0x1d, 0x06, + 0x5f, 0x5c, 0x7c, 0xae, 0xf0, 0x18, 0xa2, 0xf1, 0x42, 0x8d, 0x7a, 0x9d, 0xca, 0x00, 0x7c, 0x51, + 0x59, 0x7f, 0x66, 0x5c, 0x6e, 0xfb, 0xfc, 0xab, 0x5f, 0x2a, 0x42, 0xad, 0xd2, 0x99, 0x91, 0x17, + 0xd5, 0xf2, 0xdc, 0x71, 0x59, 0x89, 0x84, 0x91, 0x3a, 0x45, 0x55, 0x5c, 0x23, 0x2f, 0x2b, 0xa2, + 0x7e, 0xb1, 0xa8, 0xca, 0x79, 0xe6, 0x51, 0x54, 0xcf, 0x40, 0xdf, 0x9c, 0x27, 0xbd, 0xba, 0xec, + 0x7d, 0xb6, 0x18, 0x5a, 0xde, 0xc2, 0x9d, 0x3a, 0x2f, 0xbe, 0x45, 0x78, 0xad, 0x72, 0x32, 0x58, + 0x13, 0xe3, 0xf3, 0x91, 0x38, 0x04, 0xa1, 0x78, 0x69, 0x36, 0x6f, 0x04, 0x29, 0x30, 0xa7, 0xbc, + 0x82, 0xbe, 0x57, 0x8c, 0x98, 0x14, 0x4a, 0x55, 0x72, 0xdc, 0xcd, 0x2d, 0x90, 0x2d, 0x35, 0xcd, + 0x82, 0xe6, 0x7c, 0x2d, 0x50, 0x96, 0x20, 0xdd, 0x7f, 0x1e, 0xc2, 0xf7, 0x67, 0x84, 0xe4, 0x08, + 0xe1, 0xc5, 0x5c, 0xfe, 0xb5, 0x59, 0xb6, 0x2e, 0x6f, 0xec, 0xb5, 0xd7, 0x95, 0x68, 0xf1, 0xad, + 0xeb, 0x57, 0x3e, 0xf9, 0xe5, 0x8f, 0x6f, 0x1a, 0x6d, 0xb2, 0x9a, 0xfe, 0x32, 0x31, 0x8b, 0xbf, + 0x95, 0x62, 0xf3, 0x23, 0x48, 0xdc, 0x8f, 0xc9, 0x8f, 0x08, 0x63, 0x98, 0xd9, 0xf3, 0xfd, 0x1a, + 0x34, 0x13, 0x0d, 0x7e, 0x0d, 0x9a, 0xc9, 0x4e, 0x5e, 0xd7, 0x32, 0x9a, 0x65, 0x42, 0x26, 0x69, + 0xc8, 0x5f, 0x08, 0x2f, 0x89, 0x5f, 0x3f, 0xb2, 0x5d, 0xc7, 0x49, 0x69, 0x6b, 0xaa, 0xf5, 0x55, + 0x4a, 0x02, 0x55, 0x27, 0xa3, 0xda, 0x20, 0x8f, 0x8d, 0xa9, 0xc2, 0x88, 0x85, 0x2c, 0xa6, 0x4e, + 0x47, 0x12, 0xac, 0xbb, 0x08, 0x5f, 0x10, 0x95, 0xd2, 0x98, 0x6d, 0xd7, 0xf1, 0x59, 0x35, 0x6b, + 0x65, 0xbf, 0xad, 0x3f, 0x9a, 0xb1, 0x5e, 0x26, 0x2b, 0x53, 0x58, 0xc9, 0x97, 0x0d, 0xdc, 0xaa, + 0x6a, 0x64, 0xc8, 0x8e, 0xca, 0x08, 0x94, 0x7a, 0x41, 0xed, 0xed, 0xf9, 0x88, 0x03, 0xfc, 0xd3, + 0x19, 0x7c, 0x97, 
0x3c, 0x29, 0x81, 0x1f, 0x5f, 0x83, 0xc8, 0x62, 0xfe, 0x79, 0x03, 0xaf, 0x54, + 0xc9, 0xa7, 0xd1, 0xdf, 0x51, 0x19, 0x2a, 0x75, 0xa6, 0x9c, 0xa2, 0x13, 0xd6, 0x8d, 0xcc, 0x94, + 0x4d, 0xb2, 0x7e, 0x3a, 0x53, 0xc8, 0x4f, 0x48, 0x68, 0x11, 0xc9, 0x1b, 0x0a, 0xca, 0x65, 0xa1, + 0x43, 0xd5, 0xde, 0x54, 0xa6, 0x07, 0x80, 0x6b, 0x19, 0xe0, 0x25, 0x72, 0x51, 0x52, 0x82, 0xe3, + 0x94, 0xe0, 0x4f, 0x84, 0x97, 0xc4, 0x2e, 0xa7, 0x5e, 0xe5, 0x92, 0xf6, 0x7f, 0xf5, 0x2a, 0x97, + 0xbc, 0xd1, 0xd3, 0x9f, 0xc8, 0xd0, 0xd6, 0xc9, 0x95, 0x31, 0x1a, 0x34, 0x66, 0xb2, 0x24, 0xbe, + 0x83, 0xf0, 0x05, 0x51, 0xa8, 0x76, 0xe1, 0x52, 0x8d, 0x5a, 0xd9, 0xd3, 0xea, 0x7a, 0x86, 0xba, + 0x4a, 0xb4, 0x6a, 0x54, 0xf2, 0x37, 0xc2, 0xe7, 0x4b, 0xfd, 0x08, 0xa9, 0x69, 0xbb, 0xac, 0x5b, + 0xd3, 0x06, 0x4a, 0x35, 0xab, 0xbf, 0x42, 0x79, 0x13, 0x25, 0x0b, 0xe6, 0x31, 0xc2, 0xa4, 0x24, + 0x95, 0x46, 0xb3, 0xa6, 0xf5, 0x8a, 0x71, 0xab, 0xbb, 0x50, 0xd9, 0x87, 0x68, 0x02, 0x77, 0x6b, + 0x78, 0x74, 0xdc, 0x46, 0xb7, 0x8f, 0xdb, 0xe8, 0xf7, 0xe3, 0x36, 0xfa, 0xea, 0xa4, 0xbd, 0x70, + 0xfb, 0xa4, 0xbd, 0xf0, 0xeb, 0x49, 0x7b, 0xe1, 0xad, 0xeb, 0xae, 0x97, 0xec, 0x8d, 0x86, 0x86, + 0xcd, 0xf6, 0x4d, 0xbe, 0xbb, 0x8e, 0xec, 0x7e, 0xb8, 0x33, 0xde, 0x5f, 0x07, 0x6e, 0x88, 0x3f, + 0xf8, 0xef, 0x8e, 0x38, 0xb9, 0x15, 0xd2, 0x78, 0xf8, 0x40, 0x76, 0xd1, 0xfb, 0xd4, 0xbf, 0x01, + 0x00, 0x00, 0xff, 0xff, 0xfd, 0x01, 0xad, 0x60, 0x96, 0x18, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. 
@@ -1596,7 +1598,7 @@ var _Query_serviceDesc = grpc.ServiceDesc{ }, }, Streams: []grpc.StreamDesc{}, - Metadata: "dclauth/query.proto", + Metadata: "zigbeealliance/distributedcomplianceledger/dclauth/query.proto", } func (m *QueryGetAccountRequest) Marshal() (dAtA []byte, err error) { diff --git a/x/dclauth/types/query.pb.gw.go b/x/dclauth/types/query.pb.gw.go index b57708c79..127ddaec5 100644 --- a/x/dclauth/types/query.pb.gw.go +++ b/x/dclauth/types/query.pb.gw.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. -// source: dclauth/query.proto +// source: zigbeealliance/distributedcomplianceledger/dclauth/query.proto /* Package types is a reverse proxy. @@ -1025,27 +1025,27 @@ func RegisterQueryHandlerClient(ctx context.Context, mux *runtime.ServeMux, clie } var ( - pattern_Query_Account_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "auth", "accounts", "address"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_Account_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "auth", "accounts", "address"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_AccountAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "auth", "accounts"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_AccountAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "auth", "accounts"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_PendingAccount_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "auth", "proposed-accounts", "address"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_PendingAccount_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "auth", "proposed-accounts", "address"}, "", runtime.AssumeColonVerbOpt(false))) - 
pattern_Query_PendingAccountAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "auth", "proposed-accounts"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_PendingAccountAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "auth", "proposed-accounts"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_PendingAccountRevocation_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "auth", "proposed-revocation-accounts", "address"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_PendingAccountRevocation_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "auth", "proposed-revocation-accounts", "address"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_PendingAccountRevocationAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "auth", "proposed-revocation-accounts"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_PendingAccountRevocationAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "auth", "proposed-revocation-accounts"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_AccountStat_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"dcl", "auth", "accounts", "stat"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_AccountStat_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"dcl", "auth", "accounts", "stat"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_RevokedAccount_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "auth", "revoked-accounts", "address"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_RevokedAccount_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, 
[]string{"dcl", "auth", "revoked-accounts", "address"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_RevokedAccountAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "auth", "revoked-accounts"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_RevokedAccountAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "auth", "revoked-accounts"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_RejectedAccount_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "auth", "rejected-accounts", "address"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_RejectedAccount_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "auth", "rejected-accounts", "address"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_RejectedAccountAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "auth", "rejected-accounts"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_RejectedAccountAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "auth", "rejected-accounts"}, "", runtime.AssumeColonVerbOpt(false))) ) var ( diff --git a/x/dclauth/types/rejected_account.pb.go b/x/dclauth/types/rejected_account.pb.go index 1b232f0c1..6de4a8b57 100644 --- a/x/dclauth/types/rejected_account.pb.go +++ b/x/dclauth/types/rejected_account.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: dclauth/rejected_account.proto +// source: zigbeealliance/distributedcomplianceledger/dclauth/rejected_account.proto package types import ( fmt "fmt" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -31,7 +31,7 @@ func (m *RejectedAccount) Reset() { *m = RejectedAccount{} } func (m *RejectedAccount) String() string { return proto.CompactTextString(m) } func (*RejectedAccount) ProtoMessage() {} func (*RejectedAccount) Descriptor() ([]byte, []int) { - return fileDescriptor_591d10421fdd78e4, []int{0} + return fileDescriptor_013608431c2fa3ea, []int{0} } func (m *RejectedAccount) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -64,25 +64,27 @@ func init() { proto.RegisterType((*RejectedAccount)(nil), "zigbeealliance.distributedcomplianceledger.dclauth.RejectedAccount") } -func init() { proto.RegisterFile("dclauth/rejected_account.proto", fileDescriptor_591d10421fdd78e4) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/dclauth/rejected_account.proto", fileDescriptor_013608431c2fa3ea) +} -var fileDescriptor_591d10421fdd78e4 = []byte{ - // 238 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4b, 0x49, 0xce, 0x49, - 0x2c, 0x2d, 0xc9, 0xd0, 0x2f, 0x4a, 0xcd, 0x4a, 0x4d, 0x2e, 0x49, 0x4d, 0x89, 0x4f, 0x4c, 0x4e, - 0xce, 0x2f, 0xcd, 0x2b, 0xd1, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x32, 0xaa, 0xca, 0x4c, 0x4f, - 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4b, 0xc9, 0x2c, 0x2e, 0x29, +var fileDescriptor_013608431c2fa3ea = []byte{ + // 232 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xf2, 0xac, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, 0xca, 
0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, - 0xa7, 0x16, 0xe9, 0x41, 0x8d, 0x92, 0x12, 0x49, 0xcf, 0x4f, 0xcf, 0x07, 0x6b, 0xd7, 0x07, 0xb1, - 0x20, 0x26, 0x49, 0x89, 0xc2, 0x6c, 0x42, 0xb1, 0x40, 0x4a, 0x18, 0x26, 0x9c, 0x5e, 0x94, 0x08, - 0x13, 0x54, 0xca, 0xe3, 0xe2, 0x0f, 0x82, 0xba, 0xc7, 0x11, 0xa2, 0x5a, 0x28, 0x9a, 0x8b, 0x1d, - 0xaa, 0x51, 0x82, 0x51, 0x81, 0x51, 0x83, 0xdb, 0xc8, 0x5a, 0x8f, 0x74, 0xa7, 0xe9, 0x41, 0x4d, - 0x73, 0x62, 0xb9, 0x70, 0x4f, 0x9e, 0x31, 0x08, 0x66, 0xa2, 0x53, 0xd2, 0x89, 0x47, 0x72, 0x8c, - 0x17, 0x1e, 0xc9, 0x31, 0x3e, 0x78, 0x24, 0xc7, 0x38, 0xe1, 0xb1, 0x1c, 0xc3, 0x85, 0xc7, 0x72, - 0x0c, 0x37, 0x1e, 0xcb, 0x31, 0x44, 0x79, 0xa4, 0x67, 0x96, 0x64, 0x94, 0x26, 0xe9, 0x25, 0xe7, - 0xe7, 0xea, 0x43, 0xec, 0xd3, 0x85, 0x59, 0xa8, 0x8f, 0x64, 0xa1, 0x2e, 0xc2, 0x46, 0x5d, 0x88, - 0x95, 0xfa, 0x15, 0xfa, 0x30, 0x9f, 0x95, 0x54, 0x16, 0xa4, 0x16, 0x27, 0xb1, 0x81, 0xbd, 0x66, - 0x0c, 0x08, 0x00, 0x00, 0xff, 0xff, 0x66, 0xd2, 0xf3, 0x64, 0x72, 0x01, 0x00, 0x00, + 0xa7, 0x16, 0xe9, 0xa7, 0x24, 0xe7, 0x24, 0x96, 0x96, 0x64, 0xe8, 0x17, 0xa5, 0x66, 0xa5, 0x26, + 0x97, 0xa4, 0xa6, 0xc4, 0x27, 0x26, 0x27, 0xe7, 0x97, 0xe6, 0x95, 0xe8, 0x15, 0x14, 0xe5, 0x97, + 0xe4, 0x0b, 0x19, 0xa1, 0x1a, 0xa5, 0x87, 0xc7, 0x28, 0x3d, 0xa8, 0x51, 0x52, 0x22, 0xe9, 0xf9, + 0xe9, 0xf9, 0x60, 0xed, 0xfa, 0x20, 0x16, 0xc4, 0x24, 0x29, 0x07, 0x32, 0x1c, 0x85, 0xe2, 0x16, + 0xa5, 0x3c, 0x2e, 0xfe, 0x20, 0xa8, 0x2b, 0x1d, 0x21, 0x12, 0x42, 0xd1, 0x5c, 0xec, 0x50, 0x35, + 0x12, 0x8c, 0x0a, 0x8c, 0x1a, 0xdc, 0x46, 0xd6, 0x7a, 0xa4, 0x3b, 0x58, 0x0f, 0x6a, 0x9a, 0x13, + 0xcb, 0x85, 0x7b, 0xf2, 0x8c, 0x41, 0x30, 0x13, 0x9d, 0x92, 0x4e, 0x3c, 0x92, 0x63, 0xbc, 0xf0, + 0x48, 0x8e, 0xf1, 0xc1, 0x23, 0x39, 0xc6, 0x09, 0x8f, 0xe5, 0x18, 0x2e, 0x3c, 0x96, 0x63, 0xb8, + 0xf1, 0x58, 0x8e, 0x21, 0xca, 0x23, 0x3d, 0xb3, 0x24, 0xa3, 0x34, 0x49, 0x2f, 0x39, 0x3f, 0x57, + 0x1f, 0x62, 0x9f, 0x2e, 0x36, 0x7f, 0xe9, 0x22, 0x6c, 
0xd4, 0x85, 0xfa, 0xac, 0x02, 0xee, 0xb7, + 0x92, 0xca, 0x82, 0xd4, 0xe2, 0x24, 0x36, 0xb0, 0xd7, 0x8c, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, + 0x8a, 0x18, 0x2a, 0x46, 0xb3, 0x01, 0x00, 0x00, } func (m *RejectedAccount) Marshal() (dAtA []byte, err error) { diff --git a/x/dclauth/types/revoked_account.pb.go b/x/dclauth/types/revoked_account.pb.go index 1bab96aa9..23f14536c 100644 --- a/x/dclauth/types/revoked_account.pb.go +++ b/x/dclauth/types/revoked_account.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: dclauth/revoked_account.proto +// source: zigbeealliance/distributedcomplianceledger/dclauth/revoked_account.proto package types import ( fmt "fmt" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -45,7 +45,7 @@ func (x RevokedAccount_Reason) String() string { } func (RevokedAccount_Reason) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_eabe3211506422ff, []int{0, 0} + return fileDescriptor_0da4e0e6766e981b, []int{0, 0} } type RevokedAccount struct { @@ -58,7 +58,7 @@ func (m *RevokedAccount) Reset() { *m = RevokedAccount{} } func (m *RevokedAccount) String() string { return proto.CompactTextString(m) } func (*RevokedAccount) ProtoMessage() {} func (*RevokedAccount) Descriptor() ([]byte, []int) { - return fileDescriptor_eabe3211506422ff, []int{0} + return fileDescriptor_0da4e0e6766e981b, []int{0} } func (m *RevokedAccount) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -106,32 +106,34 @@ func init() { proto.RegisterType((*RevokedAccount)(nil), "zigbeealliance.distributedcomplianceledger.dclauth.RevokedAccount") } -func init() { proto.RegisterFile("dclauth/revoked_account.proto", fileDescriptor_eabe3211506422ff) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/dclauth/revoked_account.proto", 
fileDescriptor_0da4e0e6766e981b) +} -var fileDescriptor_eabe3211506422ff = []byte{ - // 341 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x91, 0x3f, 0x4f, 0xc2, 0x40, - 0x18, 0xc6, 0x7b, 0x60, 0x30, 0x39, 0x22, 0x62, 0xfd, 0x13, 0x42, 0x62, 0x25, 0x4c, 0x2c, 0x5c, - 0x93, 0x32, 0x19, 0x27, 0x58, 0xd4, 0xc1, 0xa5, 0x31, 0x0c, 0x3a, 0x98, 0xeb, 0xf5, 0x72, 0x5c, - 0x3c, 0xfa, 0x36, 0xd7, 0x2b, 0x51, 0x3f, 0x85, 0x1f, 0xcb, 0x91, 0xd1, 0xc9, 0x18, 0xd8, 0xfc, - 0x14, 0x06, 0xae, 0x8d, 0xe2, 0x26, 0x5b, 0xf3, 0xa4, 0xef, 0xef, 0xf9, 0xbd, 0xf7, 0xe2, 0xd3, - 0x98, 0x29, 0x9a, 0x9b, 0x89, 0xaf, 0xf9, 0x0c, 0x1e, 0x79, 0xfc, 0x40, 0x19, 0x83, 0x3c, 0x31, - 0x24, 0xd5, 0x60, 0xc0, 0x0d, 0x5e, 0xa4, 0x88, 0x38, 0xa7, 0x4a, 0x49, 0x9a, 0x30, 0x4e, 0x62, - 0x99, 0x19, 0x2d, 0xa3, 0xdc, 0xf0, 0x98, 0xc1, 0x34, 0xb5, 0xa9, 0xe2, 0xb1, 0xe0, 0x9a, 0x14, - 0xa4, 0xf6, 0x91, 0x00, 0x01, 0xeb, 0x71, 0x7f, 0xf5, 0x65, 0x49, 0xed, 0xe3, 0xb2, 0x68, 0xa3, - 0xa0, 0x7d, 0x58, 0xc6, 0x42, 0xd3, 0x32, 0xec, 0x7e, 0x55, 0x70, 0x23, 0xb4, 0x3e, 0x43, 0xfb, - 0xb7, 0x7b, 0x8f, 0x77, 0x8b, 0xc1, 0x16, 0xea, 0xa0, 0x5e, 0x3d, 0xb8, 0x20, 0xff, 0x57, 0x23, - 0x05, 0x6d, 0xb4, 0x33, 0xff, 0x38, 0x43, 0x61, 0x49, 0x74, 0x19, 0xde, 0xb7, 0xeb, 0x0f, 0xd3, - 0x54, 0xc3, 0x8c, 0xaa, 0xac, 0x55, 0xe9, 0x54, 0x7b, 0xf5, 0xe0, 0x7c, 0x9b, 0x92, 0xcb, 0xd5, - 0x26, 0xe1, 0x5f, 0xa2, 0x4b, 0x71, 0x4d, 0x73, 0x9a, 0x41, 0xd2, 0xaa, 0x76, 0x50, 0xaf, 0x11, - 0x5c, 0x6f, 0xc3, 0xde, 0x7c, 0x15, 0x12, 0xae, 0x81, 0x61, 0x01, 0xee, 0x0e, 0x70, 0xcd, 0x26, - 0xee, 0x01, 0xde, 0xbb, 0xd5, 0x79, 0x66, 0x38, 0x1f, 0x83, 0x91, 0x89, 0x68, 0x3a, 0xee, 0x09, - 0x76, 0x6f, 0xa8, 0x92, 0x4c, 0x42, 0x9e, 0x8d, 0xa9, 0x92, 0x31, 0x35, 0xa0, 0x9b, 0x68, 0x14, - 0xbd, 0x2d, 0x3c, 0x34, 0x5f, 0x78, 0xe8, 0x73, 0xe1, 0xa1, 0xd7, 0xa5, 0xe7, 0xcc, 0x97, 0x9e, - 0xf3, 0xbe, 0xf4, 0x9c, 0xbb, 0x2b, 0x21, 0xcd, 0x24, 0x8f, 0x08, 0x83, 0xa9, 0x6f, 0x5d, 0xfb, - 
0xa5, 0xac, 0xff, 0x4b, 0xb6, 0xff, 0x63, 0xdb, 0xb7, 0xba, 0xfe, 0x93, 0x5f, 0x9e, 0xd5, 0x3c, - 0xa7, 0x3c, 0x8b, 0x6a, 0xeb, 0xbb, 0x0e, 0xbe, 0x03, 0x00, 0x00, 0xff, 0xff, 0x59, 0x0c, 0xe4, - 0x82, 0x6e, 0x02, 0x00, 0x00, +var fileDescriptor_0da4e0e6766e981b = []byte{ + // 345 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0xd1, 0xb1, 0x4a, 0x03, 0x31, + 0x18, 0x07, 0xf0, 0x4b, 0x2b, 0x15, 0x52, 0xac, 0x35, 0x88, 0x94, 0x0e, 0x67, 0xe9, 0xd4, 0xa5, + 0x39, 0xb8, 0x4e, 0x22, 0x88, 0xed, 0x62, 0x1d, 0x5c, 0x0e, 0xe9, 0xa0, 0x83, 0xe4, 0x72, 0x21, + 0x0d, 0xa6, 0x97, 0x23, 0x97, 0x2b, 0xea, 0x53, 0xf8, 0x58, 0x8e, 0x1d, 0x9d, 0x44, 0xda, 0xcd, + 0xa7, 0x90, 0x36, 0x57, 0xe5, 0x44, 0x04, 0x6f, 0x0b, 0x81, 0xfc, 0xf2, 0xff, 0xbe, 0x3f, 0x1c, + 0x3f, 0x09, 0x1e, 0x32, 0x46, 0xa4, 0x14, 0x24, 0xa6, 0xcc, 0x8b, 0x44, 0x6a, 0xb4, 0x08, 0x33, + 0xc3, 0x22, 0xaa, 0x66, 0x89, 0xbd, 0x95, 0x2c, 0xe2, 0x4c, 0x7b, 0x11, 0x95, 0x24, 0x33, 0x53, + 0x4f, 0xb3, 0xb9, 0xba, 0x67, 0xd1, 0x1d, 0xa1, 0x54, 0x65, 0xb1, 0xc1, 0x89, 0x56, 0x46, 0x21, + 0xbf, 0x28, 0xe1, 0x3f, 0x24, 0x9c, 0x4b, 0xed, 0x43, 0xae, 0xb8, 0xda, 0x3c, 0xf7, 0xd6, 0x27, + 0x2b, 0xb5, 0xcf, 0x4b, 0x64, 0x2a, 0x64, 0x69, 0x9f, 0x95, 0x10, 0xb8, 0x26, 0xdb, 0xf7, 0xdd, + 0x8f, 0x0a, 0x6c, 0x04, 0x76, 0xca, 0xa1, 0x85, 0xd1, 0x2d, 0xdc, 0xcd, 0xff, 0x68, 0x81, 0x0e, + 0xe8, 0xd5, 0xfd, 0x53, 0xfc, 0xff, 0x81, 0x71, 0xae, 0x8d, 0x76, 0x16, 0x6f, 0xc7, 0x20, 0xd8, + 0x8a, 0x88, 0xc2, 0x7d, 0xbb, 0xd4, 0x61, 0x92, 0x68, 0x35, 0x27, 0x32, 0x6d, 0x55, 0x3a, 0xd5, + 0x5e, 0xdd, 0x3f, 0x29, 0xf3, 0xc9, 0xc5, 0x7a, 0x92, 0xe0, 0xa7, 0x88, 0x08, 0xac, 0x69, 0x46, + 0x52, 0x15, 0xb7, 0xaa, 0x1d, 0xd0, 0x6b, 0xf8, 0x97, 0x65, 0xec, 0xe2, 0x56, 0x70, 0xb0, 0x01, + 0x83, 0x1c, 0xee, 0x0e, 0x60, 0xcd, 0xde, 0xa0, 0x03, 0xb8, 0x77, 0xad, 0xb3, 0xd4, 0x30, 0x36, + 0x51, 0x46, 0xc4, 0xbc, 0xe9, 0xa0, 0x23, 0x88, 0xae, 0x88, 0x14, 0x54, 0xa8, 0x2c, 0x9d, 0x10, + 0x29, 
0x22, 0x62, 0x94, 0x6e, 0x82, 0x51, 0xf8, 0xb2, 0x74, 0xc1, 0x62, 0xe9, 0x82, 0xf7, 0xa5, + 0x0b, 0x9e, 0x57, 0xae, 0xb3, 0x58, 0xb9, 0xce, 0xeb, 0xca, 0x75, 0x6e, 0xc6, 0x5c, 0x98, 0x69, + 0x16, 0x62, 0xaa, 0x66, 0x9e, 0xcd, 0xda, 0xff, 0xad, 0xd2, 0xfe, 0x77, 0xda, 0x7e, 0x5e, 0xea, + 0xc3, 0x57, 0xad, 0xe6, 0x31, 0x61, 0x69, 0x58, 0xdb, 0xf4, 0x3a, 0xf8, 0x0c, 0x00, 0x00, 0xff, + 0xff, 0x98, 0x8f, 0x5d, 0xaf, 0xef, 0x02, 0x00, 0x00, } func (m *RevokedAccount) Marshal() (dAtA []byte, err error) { diff --git a/x/dclauth/types/tx.pb.go b/x/dclauth/types/tx.pb.go index f6f75c2ac..0666d0027 100644 --- a/x/dclauth/types/tx.pb.go +++ b/x/dclauth/types/tx.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: dclauth/tx.proto +// source: zigbeealliance/distributedcomplianceledger/dclauth/tx.proto package types @@ -8,9 +8,9 @@ import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" types "github.com/cosmos/cosmos-sdk/codec/types" - _ "github.com/gogo/protobuf/gogoproto" - grpc1 "github.com/gogo/protobuf/grpc" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + grpc1 "github.com/cosmos/gogoproto/grpc" + proto "github.com/cosmos/gogoproto/proto" types1 "github.com/zigbee-alliance/distributed-compliance-ledger/x/common/types" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" @@ -46,7 +46,7 @@ func (m *MsgProposeAddAccount) Reset() { *m = MsgProposeAddAccount{} } func (m *MsgProposeAddAccount) String() string { return proto.CompactTextString(m) } func (*MsgProposeAddAccount) ProtoMessage() {} func (*MsgProposeAddAccount) Descriptor() ([]byte, []int) { - return fileDescriptor_ba91727cb044acc7, []int{0} + return fileDescriptor_74a6e066d63491cc, []int{0} } func (m *MsgProposeAddAccount) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -138,7 +138,7 @@ func (m *MsgProposeAddAccountResponse) Reset() { *m = MsgProposeAddAccou func (m *MsgProposeAddAccountResponse) String() string { return 
proto.CompactTextString(m) } func (*MsgProposeAddAccountResponse) ProtoMessage() {} func (*MsgProposeAddAccountResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_ba91727cb044acc7, []int{1} + return fileDescriptor_74a6e066d63491cc, []int{1} } func (m *MsgProposeAddAccountResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -178,7 +178,7 @@ func (m *MsgApproveAddAccount) Reset() { *m = MsgApproveAddAccount{} } func (m *MsgApproveAddAccount) String() string { return proto.CompactTextString(m) } func (*MsgApproveAddAccount) ProtoMessage() {} func (*MsgApproveAddAccount) Descriptor() ([]byte, []int) { - return fileDescriptor_ba91727cb044acc7, []int{2} + return fileDescriptor_74a6e066d63491cc, []int{2} } func (m *MsgApproveAddAccount) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -242,7 +242,7 @@ func (m *MsgApproveAddAccountResponse) Reset() { *m = MsgApproveAddAccou func (m *MsgApproveAddAccountResponse) String() string { return proto.CompactTextString(m) } func (*MsgApproveAddAccountResponse) ProtoMessage() {} func (*MsgApproveAddAccountResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_ba91727cb044acc7, []int{3} + return fileDescriptor_74a6e066d63491cc, []int{3} } func (m *MsgApproveAddAccountResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -282,7 +282,7 @@ func (m *MsgProposeRevokeAccount) Reset() { *m = MsgProposeRevokeAccount func (m *MsgProposeRevokeAccount) String() string { return proto.CompactTextString(m) } func (*MsgProposeRevokeAccount) ProtoMessage() {} func (*MsgProposeRevokeAccount) Descriptor() ([]byte, []int) { - return fileDescriptor_ba91727cb044acc7, []int{4} + return fileDescriptor_74a6e066d63491cc, []int{4} } func (m *MsgProposeRevokeAccount) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -346,7 +346,7 @@ func (m *MsgProposeRevokeAccountResponse) Reset() { *m = MsgProposeRevok func (m *MsgProposeRevokeAccountResponse) String() string { return proto.CompactTextString(m) 
} func (*MsgProposeRevokeAccountResponse) ProtoMessage() {} func (*MsgProposeRevokeAccountResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_ba91727cb044acc7, []int{5} + return fileDescriptor_74a6e066d63491cc, []int{5} } func (m *MsgProposeRevokeAccountResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -386,7 +386,7 @@ func (m *MsgApproveRevokeAccount) Reset() { *m = MsgApproveRevokeAccount func (m *MsgApproveRevokeAccount) String() string { return proto.CompactTextString(m) } func (*MsgApproveRevokeAccount) ProtoMessage() {} func (*MsgApproveRevokeAccount) Descriptor() ([]byte, []int) { - return fileDescriptor_ba91727cb044acc7, []int{6} + return fileDescriptor_74a6e066d63491cc, []int{6} } func (m *MsgApproveRevokeAccount) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -450,7 +450,7 @@ func (m *MsgApproveRevokeAccountResponse) Reset() { *m = MsgApproveRevok func (m *MsgApproveRevokeAccountResponse) String() string { return proto.CompactTextString(m) } func (*MsgApproveRevokeAccountResponse) ProtoMessage() {} func (*MsgApproveRevokeAccountResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_ba91727cb044acc7, []int{7} + return fileDescriptor_74a6e066d63491cc, []int{7} } func (m *MsgApproveRevokeAccountResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -490,7 +490,7 @@ func (m *MsgRejectAddAccount) Reset() { *m = MsgRejectAddAccount{} } func (m *MsgRejectAddAccount) String() string { return proto.CompactTextString(m) } func (*MsgRejectAddAccount) ProtoMessage() {} func (*MsgRejectAddAccount) Descriptor() ([]byte, []int) { - return fileDescriptor_ba91727cb044acc7, []int{8} + return fileDescriptor_74a6e066d63491cc, []int{8} } func (m *MsgRejectAddAccount) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -554,7 +554,7 @@ func (m *MsgRejectAddAccountResponse) Reset() { *m = MsgRejectAddAccount func (m *MsgRejectAddAccountResponse) String() string { return proto.CompactTextString(m) } func 
(*MsgRejectAddAccountResponse) ProtoMessage() {} func (*MsgRejectAddAccountResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_ba91727cb044acc7, []int{9} + return fileDescriptor_74a6e066d63491cc, []int{9} } func (m *MsgRejectAddAccountResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -596,55 +596,58 @@ func init() { proto.RegisterType((*MsgRejectAddAccountResponse)(nil), "zigbeealliance.distributedcomplianceledger.dclauth.MsgRejectAddAccountResponse") } -func init() { proto.RegisterFile("dclauth/tx.proto", fileDescriptor_ba91727cb044acc7) } - -var fileDescriptor_ba91727cb044acc7 = []byte{ - // 720 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe4, 0x96, 0x4f, 0x4f, 0x13, 0x4f, - 0x18, 0xc7, 0x99, 0x5f, 0x4b, 0x81, 0xe1, 0xc2, 0x6f, 0x69, 0x64, 0xa9, 0xba, 0xad, 0x3d, 0x98, - 0x26, 0xda, 0x5d, 0x2c, 0x42, 0x22, 0x11, 0x93, 0x36, 0x24, 0x40, 0x08, 0x91, 0x2c, 0xf1, 0x62, - 0xa2, 0x64, 0xff, 0x3c, 0x0c, 0xab, 0xdb, 0x9d, 0x75, 0x66, 0xb6, 0xa1, 0xbe, 0x02, 0x4f, 0xc6, - 0x17, 0xe1, 0xd1, 0xc4, 0x8b, 0x07, 0x5f, 0x82, 0xf1, 0x44, 0x3c, 0x79, 0x22, 0x06, 0x8e, 0x26, - 0x1e, 0x38, 0x7a, 0x32, 0xec, 0x9f, 0x96, 0xd2, 0xd6, 0x84, 0xd2, 0x83, 0xd1, 0xdb, 0xec, 0xce, - 0xce, 0xf7, 0xfb, 0x7d, 0x3e, 0x79, 0x66, 0x66, 0xf1, 0x94, 0x6d, 0xb9, 0x46, 0x20, 0xf6, 0x34, - 0xb1, 0xaf, 0xfa, 0x8c, 0x0a, 0x2a, 0x55, 0x5e, 0x3a, 0xc4, 0x04, 0x30, 0x5c, 0xd7, 0x31, 0x3c, - 0x0b, 0x54, 0xdb, 0xe1, 0x82, 0x39, 0x66, 0x20, 0xc0, 0xb6, 0x68, 0xdd, 0x8f, 0xde, 0xba, 0x60, - 0x13, 0x60, 0x6a, 0xbc, 0x38, 0x37, 0x4b, 0x28, 0x25, 0x2e, 0x68, 0xa1, 0x82, 0x19, 0xec, 0x6a, - 0x86, 0xd7, 0x8c, 0xe4, 0x72, 0x59, 0x42, 0x09, 0x0d, 0x87, 0xda, 0xe9, 0x28, 0x7e, 0x3b, 0x6b, - 0x51, 0x5e, 0xa7, 0x7c, 0x27, 0x9a, 0x88, 0x1e, 0xda, 0x53, 0xf5, 0x3a, 0xf5, 0xb4, 0xc0, 0xf1, - 0xc4, 0x9d, 0xc5, 0x1d, 0x66, 0x78, 0x04, 0xa2, 0xa9, 0xe2, 0xdb, 0x34, 0xce, 0x6e, 0x72, 0xb2, - 0xc5, 0xa8, 0x4f, 0x39, 0x54, 0x6d, 0xbb, 0x6a, 
0x59, 0x34, 0xf0, 0x84, 0xb4, 0x8a, 0x33, 0xdc, - 0x21, 0x1e, 0x30, 0x19, 0x15, 0x50, 0x69, 0xa2, 0xa6, 0x9d, 0x1c, 0xe6, 0xa7, 0x1b, 0x86, 0xeb, - 0xd8, 0x86, 0x80, 0xa5, 0x22, 0x83, 0x17, 0x81, 0xc3, 0xc0, 0x2e, 0x7e, 0xf9, 0x50, 0xce, 0xc6, - 0x66, 0x55, 0xdb, 0x66, 0xc0, 0xf9, 0xb6, 0x60, 0x8e, 0x47, 0xf4, 0x78, 0xb9, 0xb4, 0x8e, 0xc7, - 0x8c, 0x68, 0x42, 0xfe, 0x6f, 0x30, 0xa5, 0x64, 0xbd, 0xf4, 0x04, 0x67, 0xfc, 0xc0, 0xdc, 0x80, - 0xa6, 0x9c, 0x2a, 0xa0, 0xd2, 0x64, 0x25, 0xab, 0x46, 0x90, 0xd4, 0x04, 0x92, 0x5a, 0xf5, 0x9a, - 0x7d, 0xf5, 0x3f, 0xb7, 0xf5, 0x2d, 0xd6, 0xf4, 0x05, 0x55, 0xb7, 0x42, 0x31, 0x3d, 0x16, 0x95, - 0xee, 0xe3, 0x51, 0x46, 0x5d, 0xe0, 0x72, 0xba, 0x90, 0x2a, 0x4d, 0xd4, 0x6e, 0xf6, 0xd1, 0xf9, - 0x79, 0x98, 0x9f, 0x8c, 0x29, 0xe9, 0xd4, 0x05, 0x3d, 0x5a, 0x24, 0x2d, 0xe1, 0xf1, 0x06, 0x78, - 0x36, 0x65, 0xeb, 0x2b, 0xf2, 0x68, 0x01, 0x95, 0x46, 0x6b, 0xca, 0xc9, 0x61, 0x3e, 0xd7, 0x16, - 0x20, 0x02, 0x96, 0xe7, 0x6e, 0xbb, 0x02, 0x96, 0x17, 0x17, 0x16, 0xe6, 0x17, 0x8a, 0x7a, 0xeb, - 0x7b, 0xe9, 0x16, 0x4e, 0x3b, 0xde, 0x2e, 0x95, 0x33, 0x21, 0xa0, 0x99, 0x4e, 0xe3, 0xba, 0xb1, - 0xbf, 0x7c, 0x77, 0xee, 0xde, 0x62, 0x51, 0x0f, 0x3f, 0x92, 0x24, 0x9c, 0x16, 0x4e, 0x1d, 0xe4, - 0xb1, 0x02, 0x2a, 0xa5, 0xf4, 0x70, 0x2c, 0x3d, 0xc5, 0xd8, 0x67, 0xd4, 0x0e, 0x2c, 0xb1, 0xbe, - 0xc2, 0xe5, 0xf1, 0x42, 0xaa, 0x34, 0x59, 0x79, 0xa0, 0x5e, 0xa0, 0xed, 0xa2, 0x0e, 0x51, 0x1f, - 0x85, 0x1d, 0xa2, 0x9f, 0x36, 0x88, 0x7e, 0x46, 0xb1, 0xa8, 0xe0, 0x6b, 0xbd, 0xba, 0x44, 0x07, - 0xee, 0x53, 0x8f, 0x43, 0xf1, 0x3b, 0x0a, 0xdb, 0xa8, 0xea, 0xfb, 0x8c, 0x36, 0xfe, 0xf4, 0x36, - 0x4a, 0x68, 0xa7, 0x2e, 0x42, 0x3b, 0xdd, 0xa6, 0x1d, 0xd3, 0xe8, 0x2a, 0xb6, 0x45, 0xe3, 0x07, - 0xc2, 0x33, 0x6d, 0x5c, 0x3a, 0x34, 0xe8, 0x73, 0xf8, 0xab, 0x81, 0xdc, 0xc0, 0xf9, 0x3e, 0xf5, - 0x9e, 0x67, 0x12, 0x43, 0xfb, 0x67, 0x98, 0xf4, 0xaa, 0xb7, 0xc5, 0xe4, 0x15, 0xc2, 0xd3, 0x9b, - 0x9c, 0xe8, 0xf0, 0x0c, 0x2c, 0x71, 0x66, 0xd3, 0x5c, 0xe9, 0xe4, 0xd1, 0x2a, 0x4f, 
0x3e, 0x57, - 0xde, 0x10, 0xd3, 0x5e, 0xc7, 0x57, 0x7b, 0x24, 0x49, 0x92, 0x56, 0x5e, 0x8f, 0xe1, 0xd4, 0x26, - 0x27, 0xd2, 0x7b, 0x84, 0xff, 0xef, 0xbe, 0x2b, 0xd6, 0xd4, 0x8b, 0x5f, 0x70, 0x6a, 0xaf, 0xf3, - 0x24, 0xb7, 0x35, 0x2c, 0xa5, 0x24, 0x79, 0x98, 0xb8, 0xfb, 0x58, 0x1a, 0x34, 0x71, 0x97, 0xd2, - 0xc0, 0x89, 0xfb, 0x9e, 0x1e, 0xd2, 0x47, 0x84, 0xb3, 0x3d, 0x8f, 0x8e, 0x8d, 0xcb, 0xc1, 0xe9, - 0x10, 0xcb, 0x6d, 0x0f, 0x51, 0xac, 0x23, 0x7a, 0xcf, 0x1d, 0xbe, 0x71, 0x39, 0x4a, 0xc3, 0x89, - 0xfe, 0xbb, 0xbd, 0x28, 0xbd, 0x43, 0x78, 0xaa, 0x6b, 0x23, 0xae, 0x0e, 0xe8, 0x74, 0x5e, 0x28, - 0xf7, 0x70, 0x48, 0x42, 0x49, 0xdc, 0x9a, 0xf9, 0xe9, 0x48, 0x41, 0x07, 0x47, 0x0a, 0xfa, 0x76, - 0xa4, 0xa0, 0x37, 0xc7, 0xca, 0xc8, 0xc1, 0xb1, 0x32, 0xf2, 0xf5, 0x58, 0x19, 0x79, 0xbc, 0x46, - 0x1c, 0xb1, 0x17, 0x98, 0xa7, 0x37, 0xba, 0x16, 0x99, 0x96, 0x13, 0x57, 0xed, 0x8c, 0x6b, 0xb9, - 0x6d, 0x5b, 0x8e, 0x7c, 0xb5, 0x7d, 0xad, 0xf5, 0xe7, 0xda, 0xf4, 0x81, 0x9b, 0x99, 0xf0, 0xb7, - 0x6a, 0xfe, 0x57, 0x00, 0x00, 0x00, 0xff, 0xff, 0x2c, 0x26, 0xc6, 0x46, 0xd1, 0x0a, 0x00, 0x00, +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/dclauth/tx.proto", fileDescriptor_74a6e066d63491cc) +} + +var fileDescriptor_74a6e066d63491cc = []byte{ + // 726 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe4, 0x96, 0x41, 0x4f, 0x13, 0x41, + 0x14, 0xc7, 0x19, 0x5b, 0x0a, 0x0c, 0x17, 0x5d, 0x1a, 0x59, 0xaa, 0x6e, 0x6b, 0x0f, 0xa6, 0x89, + 0x76, 0x17, 0x8b, 0x90, 0x88, 0x62, 0xd2, 0x86, 0x04, 0x08, 0x21, 0x92, 0x25, 0x5e, 0x4c, 0x94, + 0x6c, 0x77, 0x1f, 0xc3, 0xea, 0x76, 0x67, 0x9d, 0x99, 0x6d, 0xa8, 0x9f, 0xc0, 0x93, 0xf1, 0x43, + 0x78, 0x34, 0xf1, 0xe2, 0xc1, 0x8f, 0x60, 0x3c, 0x11, 0x4f, 0x9e, 0x88, 0x81, 0xa3, 0x89, 0x07, + 0x8e, 0x9e, 0x0c, 0xbb, 0xdb, 0x96, 0xd2, 0x2d, 0x49, 0x4b, 0x0f, 0x46, 0x6f, 0xb3, 0x3b, 0xf3, + 0xfe, 0xf3, 0x7f, 0xbf, 0xbc, 0x99, 0x79, 0xf8, 0xc1, 0x6b, 0x9b, 0x54, 0x01, 0x0c, 
0xc7, 0xb1, + 0x0d, 0xd7, 0x04, 0xcd, 0xb2, 0xb9, 0x60, 0x76, 0xd5, 0x17, 0x60, 0x99, 0xb4, 0xe6, 0x85, 0x7f, + 0x1d, 0xb0, 0x08, 0x30, 0xcd, 0x32, 0x1d, 0xc3, 0x17, 0xbb, 0x9a, 0xd8, 0x53, 0x3d, 0x46, 0x05, + 0x95, 0x4a, 0x9d, 0xc1, 0xea, 0x39, 0xc1, 0x6a, 0x14, 0x9c, 0x99, 0x21, 0x94, 0x12, 0x07, 0xb4, + 0x40, 0xa1, 0xea, 0xef, 0x68, 0x86, 0xdb, 0x08, 0xe5, 0x32, 0x69, 0x42, 0x09, 0x0d, 0x86, 0xda, + 0xc9, 0x28, 0xfa, 0x3b, 0x63, 0x52, 0x5e, 0xa3, 0x7c, 0x3b, 0x9c, 0x08, 0x3f, 0xa2, 0xa9, 0xe5, + 0x3e, 0xcc, 0x9b, 0xb4, 0x56, 0xa3, 0xae, 0xe6, 0xdb, 0xae, 0xb8, 0xbb, 0xb0, 0xcd, 0x0c, 0x97, + 0x40, 0xa8, 0x92, 0x7f, 0x9f, 0xc4, 0xe9, 0x0d, 0x4e, 0x36, 0x19, 0xf5, 0x28, 0x87, 0xb2, 0x65, + 0x95, 0x4d, 0x93, 0xfa, 0xae, 0x90, 0x56, 0x70, 0x8a, 0xdb, 0xc4, 0x05, 0x26, 0xa3, 0x1c, 0x2a, + 0x4c, 0x54, 0xb4, 0xe3, 0x83, 0xec, 0x54, 0xdd, 0x70, 0x6c, 0xcb, 0x10, 0xb0, 0x98, 0x67, 0xf0, + 0xca, 0xb7, 0x19, 0x58, 0xf9, 0x6f, 0x9f, 0x8a, 0xe9, 0xc8, 0x57, 0xd9, 0xb2, 0x18, 0x70, 0xbe, + 0x25, 0x98, 0xed, 0x12, 0x3d, 0x0a, 0x97, 0xd6, 0xf0, 0x98, 0x11, 0x4e, 0xc8, 0x97, 0x06, 0x53, + 0x6a, 0xc6, 0x4b, 0xcf, 0x70, 0xca, 0xf3, 0xab, 0xeb, 0xd0, 0x90, 0x13, 0x39, 0x54, 0x98, 0x2c, + 0xa5, 0xd5, 0x90, 0xa7, 0xda, 0xe4, 0xa9, 0x96, 0xdd, 0x46, 0x4f, 0xfd, 0xaf, 0x6d, 0x7d, 0x93, + 0x35, 0x3c, 0x41, 0xd5, 0xcd, 0x40, 0x4c, 0x8f, 0x44, 0xa5, 0x87, 0x78, 0x94, 0x51, 0x07, 0xb8, + 0x9c, 0xcc, 0x25, 0x0a, 0x13, 0x95, 0x5b, 0x3d, 0x74, 0x7e, 0x1f, 0x64, 0x27, 0x23, 0x4a, 0x3a, + 0x75, 0x40, 0x0f, 0x83, 0xa4, 0x45, 0x3c, 0x5e, 0x07, 0xd7, 0xa2, 0x6c, 0x6d, 0x59, 0x1e, 0xcd, + 0xa1, 0xc2, 0x68, 0x45, 0x39, 0x3e, 0xc8, 0x66, 0xda, 0x02, 0x44, 0xc0, 0xd2, 0xec, 0x1d, 0x47, + 0xc0, 0xd2, 0xc2, 0xfc, 0xfc, 0xdc, 0x7c, 0x5e, 0x6f, 0xad, 0x97, 0x6e, 0xe3, 0xa4, 0xed, 0xee, + 0x50, 0x39, 0x15, 0x00, 0x9a, 0xee, 0xdc, 0xb8, 0x66, 0xec, 0x2d, 0xdd, 0x9b, 0xbd, 0xbf, 0x90, + 0xd7, 0x83, 0x45, 0x92, 0x84, 0x93, 0xc2, 0xae, 0x81, 0x3c, 0x96, 0x43, 0x85, 0x84, 0x1e, 0x8c, + 0xa5, 0xe7, 0x18, 0x7b, 
0x8c, 0x5a, 0xbe, 0x29, 0xd6, 0x96, 0xb9, 0x3c, 0x9e, 0x4b, 0x14, 0x26, + 0x4b, 0x8f, 0xd4, 0x3e, 0x2a, 0x34, 0xac, 0x10, 0xf5, 0x49, 0x50, 0x21, 0xfa, 0x49, 0x81, 0xe8, + 0xa7, 0x14, 0xf3, 0x0a, 0xbe, 0x1e, 0x57, 0x25, 0x3a, 0x70, 0x8f, 0xba, 0x1c, 0xf2, 0x3f, 0x51, + 0x50, 0x46, 0x65, 0xcf, 0x63, 0xb4, 0xfe, 0xb7, 0x97, 0x51, 0x93, 0x76, 0xa2, 0x1f, 0xda, 0xc9, + 0x36, 0xed, 0x88, 0x46, 0x57, 0xb2, 0x2d, 0x1a, 0xbf, 0x10, 0x9e, 0x6e, 0xe3, 0xd2, 0xa1, 0x4e, + 0x5f, 0xc2, 0x3f, 0x0d, 0xe4, 0x26, 0xce, 0xf6, 0xc8, 0xf7, 0x2c, 0x93, 0x08, 0xda, 0x7f, 0xc3, + 0x24, 0x2e, 0xdf, 0x16, 0x93, 0x37, 0x08, 0x4f, 0x6d, 0x70, 0xa2, 0xc3, 0x0b, 0x30, 0xc5, 0xa9, + 0x43, 0x73, 0xb5, 0x93, 0x47, 0x2b, 0x3d, 0xf9, 0x4c, 0x7a, 0x43, 0x74, 0x7b, 0x03, 0x5f, 0x8b, + 0x71, 0xd2, 0x74, 0x5a, 0x7a, 0x3b, 0x86, 0x13, 0x1b, 0x9c, 0x48, 0x1f, 0x11, 0xbe, 0xd2, 0xfd, + 0x56, 0xac, 0xaa, 0xfd, 0xbf, 0x85, 0x6a, 0xdc, 0x7d, 0x92, 0xd9, 0x1c, 0x96, 0x52, 0xd3, 0x79, + 0xe0, 0xb8, 0xfb, 0x5a, 0x1a, 0xd4, 0x71, 0x97, 0xd2, 0xc0, 0x8e, 0x7b, 0xde, 0x1e, 0xd2, 0x67, + 0x84, 0xd3, 0xb1, 0x57, 0xc7, 0xfa, 0xc5, 0xe0, 0x74, 0x88, 0x65, 0xb6, 0x86, 0x28, 0xd6, 0x61, + 0x3d, 0xf6, 0x84, 0xaf, 0x5f, 0x8c, 0xd2, 0x70, 0xac, 0x9f, 0x77, 0x16, 0xa5, 0x0f, 0x08, 0x5f, + 0xee, 0x3a, 0x88, 0x2b, 0x03, 0xee, 0x74, 0x56, 0x28, 0xf3, 0x78, 0x48, 0x42, 0x4d, 0xbb, 0x95, + 0xea, 0x97, 0x43, 0x05, 0xed, 0x1f, 0x2a, 0xe8, 0xc7, 0xa1, 0x82, 0xde, 0x1d, 0x29, 0x23, 0xfb, + 0x47, 0xca, 0xc8, 0xf7, 0x23, 0x65, 0xe4, 0xe9, 0x2a, 0xb1, 0xc5, 0xae, 0x5f, 0x3d, 0x79, 0xd1, + 0xb5, 0x70, 0xd3, 0x62, 0x5c, 0x8f, 0x58, 0x6c, 0x6f, 0x5b, 0x8c, 0xba, 0xc4, 0xbd, 0x76, 0x93, + 0xdb, 0xf0, 0x80, 0x57, 0x53, 0x41, 0x5b, 0x35, 0xf7, 0x27, 0x00, 0x00, 0xff, 0xff, 0x43, 0xc3, + 0x7b, 0x89, 0x27, 0x0b, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. 
@@ -868,7 +871,7 @@ var _Msg_serviceDesc = grpc.ServiceDesc{ }, }, Streams: []grpc.StreamDesc{}, - Metadata: "dclauth/tx.proto", + Metadata: "zigbeealliance/distributedcomplianceledger/dclauth/tx.proto", } func (m *MsgProposeAddAccount) Marshal() (dAtA []byte, err error) { diff --git a/x/dclgenutil/client/cli/collect.go b/x/dclgenutil/client/cli/collect.go index 8aa9a9e51..0a993b4d4 100644 --- a/x/dclgenutil/client/cli/collect.go +++ b/x/dclgenutil/client/cli/collect.go @@ -4,12 +4,13 @@ import ( "encoding/json" "path/filepath" + tmtypes "github.com/cometbft/cometbft/types" "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/client/flags" "github.com/cosmos/cosmos-sdk/server" "github.com/pkg/errors" "github.com/spf13/cobra" - tmtypes "github.com/tendermint/tendermint/types" + dclauthtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclgenutil" ) @@ -25,7 +26,11 @@ func CollectGenTxsCmd(genAccIterator dclauthtypes.GenesisAccountsIterator, defau serverCtx := server.GetServerContextFromCmd(cmd) config := serverCtx.Config - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } + cdc := clientCtx.Codec config.SetRoot(clientCtx.HomeDir) diff --git a/x/dclgenutil/client/cli/gentx.go b/x/dclgenutil/client/cli/gentx.go index b9da1b503..22e666f1f 100644 --- a/x/dclgenutil/client/cli/gentx.go +++ b/x/dclgenutil/client/cli/gentx.go @@ -9,6 +9,8 @@ import ( "os" "path/filepath" + tmos "github.com/cometbft/cometbft/libs/os" + tmtypes "github.com/cometbft/cometbft/types" "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/client/flags" "github.com/cosmos/cosmos-sdk/client/tx" @@ -20,8 +22,6 @@ import ( authclient "github.com/cosmos/cosmos-sdk/x/auth/client" "github.com/pkg/errors" "github.com/spf13/cobra" - tmos "github.com/tendermint/tendermint/libs/os" - tmtypes 
"github.com/tendermint/tendermint/types" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/cli" dclauthtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclgenutil" @@ -110,17 +110,23 @@ $ %s gentx my-key-name --home=/path/to/home/dir --keyring-backend=os --chain-id= return errors.Wrap(err, "error creating configuration to create validator msg") } - addr := key.GetAddress() + addr, err := key.GetAddress() + if err != nil { + return err + } err = dclgenutil.ValidateAccountInGenesis(genesisState, genAccIterator, addr, cdc) if err != nil { return errors.Wrap(err, "failed to validate account in genesis") } - txFactory := tx.NewFactoryCLI(clientCtx, cmd.Flags()) + txFactory, err := tx.NewFactoryCLI(clientCtx, cmd.Flags()) if err != nil { return errors.Wrap(err, "error creating tx builder") } - pub := key.GetAddress() + pub, err := key.GetAddress() + if err != nil { + return err + } clientCtx = clientCtx.WithInput(inBuf).WithFromAddress(pub) // create a 'create-validator' message @@ -131,15 +137,15 @@ $ %s gentx my-key-name --home=/path/to/home/dir --keyring-backend=os --chain-id= if key.GetType() == keyring.TypeOffline || key.GetType() == keyring.TypeMulti { cmd.PrintErrln("Offline key passed in. 
Use `tx sign` command to sign.") - // return txBldr.PrintUnsignedTx(clientCtx, msg) - return authclient.PrintUnsignedStdTx(txBldr, clientCtx, []sdk.Msg{msg}) + + return txBldr.PrintUnsignedTx(clientCtx, msg) } // write the unsigned transaction to the buffer w := bytes.NewBuffer([]byte{}) clientCtx = clientCtx.WithOutput(w) - if err = authclient.PrintUnsignedStdTx(txBldr, clientCtx, []sdk.Msg{msg}); err != nil { + if err = txBldr.PrintUnsignedTx(clientCtx, msg); err != nil { // if err = txBldr.PrintUnsignedTx(clientCtx, msg); err != nil { return errors.Wrap(err, "failed to print unsigned std tx") } @@ -181,7 +187,6 @@ $ %s gentx my-key-name --home=/path/to/home/dir --keyring-backend=os --chain-id= cmd.Flags().String(flags.FlagHome, defaultNodeHome, "The application home directory") cmd.Flags().String(flags.FlagOutputDocument, "", "Write the genesis transaction JSON document to the given file instead of the default location") - cmd.Flags().String(flags.FlagChainID, "", "The network chain ID") cmd.Flags().AddFlagSet(fsCreateValidator) cli.AddTxFlagsToCmd(cmd) @@ -212,7 +217,7 @@ func readUnsignedGenTxFile(clientCtx client.Context, r io.Reader) (sdk.Tx, error } func writeSignedGenTx(clientCtx client.Context, outputDocument string, tx sdk.Tx) error { - outputFile, err := os.OpenFile(outputDocument, os.O_CREATE|os.O_EXCL|os.O_WRONLY, 0o644) + outputFile, err := os.OpenFile(outputDocument, os.O_CREATE|os.O_EXCL|os.O_WRONLY, 0o644) //nolint:nosnakecase if err != nil { return err } diff --git a/x/dclgenutil/client/cli/init.go b/x/dclgenutil/client/cli/init.go index 561d7c0f7..34f28d7cf 100644 --- a/x/dclgenutil/client/cli/init.go +++ b/x/dclgenutil/client/cli/init.go @@ -7,6 +7,11 @@ import ( "os" "path/filepath" + cfg "github.com/cometbft/cometbft/config" + "github.com/cometbft/cometbft/libs/cli" + tmos "github.com/cometbft/cometbft/libs/os" + tmrand "github.com/cometbft/cometbft/libs/rand" + "github.com/cometbft/cometbft/types" "github.com/cosmos/cosmos-sdk/client" 
"github.com/cosmos/cosmos-sdk/client/flags" "github.com/cosmos/cosmos-sdk/client/input" @@ -16,11 +21,6 @@ import ( "github.com/cosmos/go-bip39" "github.com/pkg/errors" "github.com/spf13/cobra" - cfg "github.com/tendermint/tendermint/config" - "github.com/tendermint/tendermint/libs/cli" - tmos "github.com/tendermint/tendermint/libs/os" - tmrand "github.com/tendermint/tendermint/libs/rand" - "github.com/tendermint/tendermint/types" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclgenutil" ) @@ -70,7 +70,11 @@ func InitCmd(mbm module.BasicManager, defaultNodeHome string) *cobra.Command { Long: `Initialize validators's and node's configuration files.`, Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } + cdc := clientCtx.Codec serverCtx := server.GetServerContextFromCmd(cmd) diff --git a/x/dclgenutil/client/cli/init_test.go b/x/dclgenutil/client/cli/init_test.go index ab757f78e..13a6f4097 100644 --- a/x/dclgenutil/client/cli/init_test.go +++ b/x/dclgenutil/client/cli/init_test.go @@ -26,9 +26,9 @@ import ( "github.com/cosmos/cosmos-sdk/x/staking" "github.com/spf13/viper" "github.com/stretchr/testify/require" - abci_server "github.com/tendermint/tendermint/abci/server" - "github.com/tendermint/tendermint/libs/cli" - "github.com/tendermint/tendermint/libs/log" + abci_server "github.com/cometbft/cometbft/abci/server" + "github.com/cometbft/cometbft/libs/cli" + "github.com/cometbft/cometbft/libs/log" ) var testMbm = module.NewBasicManager( diff --git a/x/dclgenutil/client/cli/query.go b/x/dclgenutil/client/cli/query.go index 1c0dcfccd..088eff169 100644 --- a/x/dclgenutil/client/cli/query.go +++ b/x/dclgenutil/client/cli/query.go @@ -13,7 +13,7 @@ import ( ) // GetQueryCmd returns the cli query commands for this module. 
-func GetQueryCmd(queryRoute string) *cobra.Command { +func GetQueryCmd() *cobra.Command { // Group dclgenutil queries under a subcommand cmd := &cobra.Command{ Use: types.ModuleName, diff --git a/x/dclgenutil/client/cli/validate_genesis.go b/x/dclgenutil/client/cli/validate_genesis.go index b2abeb28d..95e76ea24 100644 --- a/x/dclgenutil/client/cli/validate_genesis.go +++ b/x/dclgenutil/client/cli/validate_genesis.go @@ -4,11 +4,11 @@ import ( "encoding/json" "fmt" + tmtypes "github.com/cometbft/cometbft/types" "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/server" "github.com/cosmos/cosmos-sdk/types/module" "github.com/spf13/cobra" - tmtypes "github.com/tendermint/tendermint/types" ) const chainUpgradeGuide = "https://docs.cosmos.network/master/migrations/chain-upgrade-guide-040.html" @@ -21,7 +21,10 @@ func ValidateGenesisCmd(mbm module.BasicManager) *cobra.Command { Short: "validates the genesis file at the default location or at the location passed as an arg", RunE: func(cmd *cobra.Command, args []string) (err error) { serverCtx := server.GetServerContextFromCmd(cmd) - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } cdc := clientCtx.Codec diff --git a/x/dclgenutil/collect.go b/x/dclgenutil/collect.go index 1f1055e3e..de119ef8e 100644 --- a/x/dclgenutil/collect.go +++ b/x/dclgenutil/collect.go @@ -12,12 +12,12 @@ import ( "sort" "strings" + cfg "github.com/cometbft/cometbft/config" + tmtypes "github.com/cometbft/cometbft/types" "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/codec" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/x/genutil/types" - cfg "github.com/tendermint/tendermint/config" - tmtypes "github.com/tendermint/tendermint/types" dclauthtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" validatortypes 
"github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/types" ) diff --git a/x/dclgenutil/collect_test.go b/x/dclgenutil/collect_test.go index c6d68644c..8712acad5 100644 --- a/x/dclgenutil/collect_test.go +++ b/x/dclgenutil/collect_test.go @@ -15,8 +15,8 @@ import ( bankexported "github.com/cosmos/cosmos-sdk/x/bank/exported" "github.com/cosmos/cosmos-sdk/x/genutil" gtypes "github.com/cosmos/cosmos-sdk/x/genutil/types" - "github.com/gogo/protobuf/proto" - tmtypes "github.com/tendermint/tendermint/types" + "github.com/cosmos/gogoproto/proto" + tmtypes "github.com/cometbft/cometbft/types" ) type doNothingUnmarshalJSON struct { diff --git a/x/dclgenutil/genesis.go b/x/dclgenutil/genesis.go index 72142f158..fc9e4baca 100644 --- a/x/dclgenutil/genesis.go +++ b/x/dclgenutil/genesis.go @@ -1,9 +1,9 @@ package dclgenutil import ( + abci "github.com/cometbft/cometbft/abci/types" "github.com/cosmos/cosmos-sdk/client" sdk "github.com/cosmos/cosmos-sdk/types" - abci "github.com/tendermint/tendermint/abci/types" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclgenutil/types" ) diff --git a/x/dclgenutil/gentx.go b/x/dclgenutil/gentx.go index f813602bb..77082cbf2 100644 --- a/x/dclgenutil/gentx.go +++ b/x/dclgenutil/gentx.go @@ -3,11 +3,13 @@ package dclgenutil import ( "encoding/json" + "cosmossdk.io/errors" + abci "github.com/cometbft/cometbft/abci/types" "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/codec" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" - abci "github.com/tendermint/tendermint/abci/types" + dclauthtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclgenutil/types" ) @@ -63,7 +65,7 @@ func ValidateAccountInGenesis( } if !accountIsInGenesis { - return sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, + return errors.Wrapf(sdkerrors.ErrUnknownRequest, "Error account %s in not in the 
app_state.accounts array of genesis.json", addr, ) } diff --git a/x/dclgenutil/gentx_test.go b/x/dclgenutil/gentx_test.go index d200e1778..b78db1f47 100644 --- a/x/dclgenutil/gentx_test.go +++ b/x/dclgenutil/gentx_test.go @@ -8,9 +8,9 @@ import ( "testing" "github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1" - "github.com/cosmos/cosmos-sdk/simapp" - "github.com/cosmos/cosmos-sdk/simapp/helpers" - simappparams "github.com/cosmos/cosmos-sdk/simapp/params" + "cosmossdk.io/simapp" + "cosmossdk.io/simapp/helpers" + simappparams "cosmossdk.io/simapp/params" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/x/bank/testutil" banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" @@ -19,7 +19,7 @@ import ( "github.com/cosmos/cosmos-sdk/x/staking" stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" "github.com/stretchr/testify/suite" - tmproto "github.com/tendermint/tendermint/proto/tendermint/types" + tmproto "github.com/cometbft/cometbft/proto/tendermint/types" ) var ( @@ -70,7 +70,7 @@ func (suite *GenTxTestSuite) setAccountBalance(addr sdk.AccAddress, amount int64 suite.Require().NoError(err) bankGenesisState := suite.app.BankKeeper.ExportGenesis(suite.ctx) - bankGenesis, err := suite.encodingConfig.Amino.MarshalJSON(bankGenesisState) // TODO switch this to use Marshaler + bankGenesis, err := suite.encodingConfig.Amino.MarshalJSON(bankGenesisState) // TODO switch this to use Codec suite.Require().NoError(err) return bankGenesis @@ -184,7 +184,7 @@ func (suite *GenTxTestSuite) TestValidateAccountInGenesis() { suite.app.StakingKeeper.SetParams(suite.ctx, stakingtypes.DefaultParams()) stakingGenesisState := staking.ExportGenesis(suite.ctx, suite.app.StakingKeeper) suite.Require().Equal(stakingGenesisState.Params, stakingtypes.DefaultParams()) - stakingGenesis, err := cdc.MarshalJSON(stakingGenesisState) // TODO switch this to use Marshaler + stakingGenesis, err := cdc.MarshalJSON(stakingGenesisState) // TODO switch this to use Codec 
suite.Require().NoError(err) appGenesisState[stakingtypes.ModuleName] = stakingGenesis diff --git a/x/dclgenutil/module.go b/x/dclgenutil/module.go index 80e808f45..6e42ea24d 100644 --- a/x/dclgenutil/module.go +++ b/x/dclgenutil/module.go @@ -11,10 +11,10 @@ import ( "github.com/cosmos/cosmos-sdk/types/module" // this line is used by starport scaffolding # 1. + abci "github.com/cometbft/cometbft/abci/types" "github.com/gorilla/mux" "github.com/grpc-ecosystem/grpc-gateway/runtime" "github.com/spf13/cobra" - abci "github.com/tendermint/tendermint/abci/types" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclgenutil/types" ) @@ -35,7 +35,7 @@ func (AppModuleBasic) Name() string { return types.ModuleName } -func (AppModuleBasic) RegisterLegacyAminoCodec(cdc *codec.LegacyAmino) {} +func (AppModuleBasic) RegisterLegacyAminoCodec(_ *codec.LegacyAmino) {} // RegisterInterfaces registers the module's interface types. func (a AppModuleBasic) RegisterInterfaces(_ cdctypes.InterfaceRegistry) {} @@ -115,7 +115,7 @@ func (am AppModule) InitGenesis(ctx sdk.Context, cdc codec.JSONCodec, gs json.Ra } // ExportGenesis returns the dclgenutil module's exported genesis state as raw JSON bytes. 
-func (am AppModule) ExportGenesis(ctx sdk.Context, cdc codec.JSONCodec) json.RawMessage { +func (am AppModule) ExportGenesis(_ sdk.Context, cdc codec.JSONCodec) json.RawMessage { return am.DefaultGenesis(cdc) } diff --git a/x/dclgenutil/types/_genesis_state_test.go b/x/dclgenutil/types/_genesis_state_test.go index a704a58af..3bfafc351 100644 --- a/x/dclgenutil/types/_genesis_state_test.go +++ b/x/dclgenutil/types/_genesis_state_test.go @@ -11,7 +11,7 @@ import ( "github.com/cosmos/cosmos-sdk/codec" "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519" - "github.com/cosmos/cosmos-sdk/simapp" + "cosmossdk.io/simapp" sdk "github.com/cosmos/cosmos-sdk/types" banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" "github.com/cosmos/cosmos-sdk/x/genutil/types" diff --git a/x/dclgenutil/types/expected_keepers.go b/x/dclgenutil/types/expected_keepers.go index fdcad9a2b..e4bb125b0 100644 --- a/x/dclgenutil/types/expected_keepers.go +++ b/x/dclgenutil/types/expected_keepers.go @@ -3,9 +3,9 @@ package types import ( "encoding/json" + abci "github.com/cometbft/cometbft/abci/types" "github.com/cosmos/cosmos-sdk/codec" sdk "github.com/cosmos/cosmos-sdk/types" - abci "github.com/tendermint/tendermint/abci/types" dclauthtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" ) diff --git a/x/dclgenutil/types/genesis.go b/x/dclgenutil/types/genesis.go index 400586023..956f54282 100644 --- a/x/dclgenutil/types/genesis.go +++ b/x/dclgenutil/types/genesis.go @@ -5,10 +5,10 @@ import ( "errors" "fmt" + tmos "github.com/cometbft/cometbft/libs/os" + tmtypes "github.com/cometbft/cometbft/types" "github.com/cosmos/cosmos-sdk/codec" sdk "github.com/cosmos/cosmos-sdk/types" - tmos "github.com/tendermint/tendermint/libs/os" - tmtypes "github.com/tendermint/tendermint/types" validatortypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/types" ) diff --git a/x/dclgenutil/types/genesis.pb.go b/x/dclgenutil/types/genesis.pb.go index 
20e0cc1d2..1f1b91e9e 100644 --- a/x/dclgenutil/types/genesis.pb.go +++ b/x/dclgenutil/types/genesis.pb.go @@ -1,13 +1,13 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: dclgenutil/genesis.proto +// source: zigbeealliance/distributedcomplianceledger/dclgenutil/genesis.proto package types import ( encoding_json "encoding/json" fmt "fmt" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -34,7 +34,7 @@ func (m *GenesisState) Reset() { *m = GenesisState{} } func (m *GenesisState) String() string { return proto.CompactTextString(m) } func (*GenesisState) ProtoMessage() {} func (*GenesisState) Descriptor() ([]byte, []int) { - return fileDescriptor_72454a08e2a56bab, []int{0} + return fileDescriptor_8a376931cce565cf, []int{0} } func (m *GenesisState) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -74,26 +74,28 @@ func init() { proto.RegisterType((*GenesisState)(nil), "zigbeealliance.distributedcomplianceledger.dclgenutil.GenesisState") } -func init() { proto.RegisterFile("dclgenutil/genesis.proto", fileDescriptor_72454a08e2a56bab) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/dclgenutil/genesis.proto", fileDescriptor_8a376931cce565cf) +} -var fileDescriptor_72454a08e2a56bab = []byte{ - // 251 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x4c, 0xd0, 0x31, 0x4b, 0x03, 0x31, - 0x14, 0xc0, 0xf1, 0x1e, 0x42, 0x85, 0xa3, 0x53, 0x71, 0x38, 0x1c, 0x52, 0x71, 0x10, 0x97, 0x4b, - 0x06, 0x71, 0x16, 0xba, 0x38, 0x88, 0x08, 0xd5, 0xc9, 0x45, 0x72, 0xc9, 0xe3, 0x19, 0x49, 0x93, - 0xe3, 0xde, 0x3b, 0x3c, 0xfd, 0x14, 0x7e, 0x2c, 0xc7, 0x8e, 0x4e, 0x45, 0xee, 0x36, 0x3f, 0x82, - 0x93, 0xb4, 0x41, 0xda, 0x2d, 0x84, 0xc7, 0x9f, 0xf7, 0x7b, 0x79, 0x61, 0x8d, 0x47, 0x08, 0x2d, - 0x3b, 0xaf, 0x10, 0x02, 0x90, 0x23, 
0x59, 0x37, 0x91, 0xe3, 0xf4, 0xf2, 0xdd, 0x61, 0x05, 0xa0, - 0xbd, 0x77, 0x3a, 0x18, 0x90, 0xd6, 0x11, 0x37, 0xae, 0x6a, 0x19, 0xac, 0x89, 0xcb, 0x3a, 0xfd, - 0x7a, 0xb0, 0x08, 0x8d, 0xdc, 0x45, 0x8e, 0x8f, 0x30, 0x62, 0xdc, 0x16, 0xd4, 0xe6, 0x95, 0x62, - 0xa7, 0x77, 0xf9, 0xe4, 0x3a, 0xd5, 0xef, 0x59, 0x33, 0x4c, 0xaf, 0xf2, 0x43, 0x84, 0xf0, 0xc4, - 0x1d, 0x15, 0xd9, 0xc9, 0xc1, 0xf9, 0x64, 0x7e, 0xf6, 0xb3, 0x9e, 0x8d, 0x11, 0x02, 0x77, 0xf4, - 0xbb, 0x9e, 0x15, 0x10, 0x4c, 0xb4, 0x2e, 0xa0, 0x7a, 0xa1, 0x18, 0xe4, 0x42, 0xbf, 0xde, 0x02, - 0x91, 0x46, 0x58, 0x6c, 0x66, 0x1e, 0x3a, 0x9a, 0xc3, 0x67, 0x2f, 0xb2, 0x55, 0x2f, 0xb2, 0xef, - 0x5e, 0x64, 0x1f, 0x83, 0x18, 0xad, 0x06, 0x31, 0xfa, 0x1a, 0xc4, 0xe8, 0xf1, 0x06, 0x1d, 0x3f, - 0xb7, 0x95, 0x34, 0x71, 0xa9, 0x12, 0xa1, 0xfc, 0x37, 0xa8, 0x3d, 0x43, 0xb9, 0x43, 0x94, 0x49, - 0xa1, 0x3a, 0xb5, 0x77, 0x0c, 0x7e, 0xab, 0x81, 0xaa, 0xf1, 0x76, 0xfd, 0x8b, 0xbf, 0x00, 0x00, - 0x00, 0xff, 0xff, 0x14, 0xce, 0x8b, 0x18, 0x27, 0x01, 0x00, 0x00, +var fileDescriptor_8a376931cce565cf = []byte{ + // 255 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x72, 0xae, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0xa7, 0x24, 0xe7, 0xa4, 0xa7, 0xe6, 0x95, 0x96, 0x64, 0xe6, 0xe8, 0xa7, 0xa7, + 0xe6, 0xa5, 0x16, 0x67, 0x16, 0xeb, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0x99, 0xa2, 0x1a, 0xa2, + 0x87, 0xc7, 0x10, 0x3d, 0x84, 0x21, 0x52, 0x22, 0xe9, 0xf9, 0xe9, 0xf9, 0x60, 0x13, 0xf4, 0x41, + 0x2c, 0x88, 0x61, 0x4a, 0xfe, 0x5c, 0x3c, 0xee, 0x10, 0xd3, 0x83, 0x4b, 0x12, 0x4b, 0x52, 0x85, + 0xec, 0xb9, 0xd8, 0xd3, 0x53, 0xf3, 0xe2, 0x4b, 0x2a, 0x8a, 0x25, 0x18, 0x15, 0x98, 0x35, 0x78, + 0x9c, 0xd4, 0x5e, 0xdd, 0x93, 0x67, 0x4b, 0x4f, 0xcd, 0x2b, 0xa9, 0x28, 0xfe, 0x75, 0x4f, 0x5e, + 0x22, 
0x35, 0x2f, 0x39, 0x3f, 0x25, 0x33, 0x2f, 0x5d, 0x3f, 0xab, 0x38, 0x3f, 0x4f, 0x2f, 0x28, + 0xb1, 0xdc, 0x37, 0xb5, 0xb8, 0x38, 0x31, 0x3d, 0x35, 0x08, 0xa4, 0x26, 0xa4, 0xa2, 0xd8, 0x29, + 0xf5, 0xc4, 0x23, 0x39, 0xc6, 0x0b, 0x8f, 0xe4, 0x18, 0x1f, 0x3c, 0x92, 0x63, 0x9c, 0xf0, 0x58, + 0x8e, 0xe1, 0xc2, 0x63, 0x39, 0x86, 0x1b, 0x8f, 0xe5, 0x18, 0xa2, 0xbc, 0xd3, 0x33, 0x4b, 0x32, + 0x4a, 0x93, 0xf4, 0x92, 0xf3, 0x73, 0xf5, 0x21, 0x5e, 0xd0, 0xc5, 0x16, 0x10, 0xba, 0x08, 0x4f, + 0xe8, 0x42, 0x83, 0xa2, 0x02, 0x39, 0x30, 0x4a, 0x2a, 0x0b, 0x52, 0x8b, 0x93, 0xd8, 0xc0, 0xce, + 0x37, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0xec, 0xda, 0x10, 0x57, 0x52, 0x01, 0x00, 0x00, } func (m *GenesisState) Marshal() (dAtA []byte, err error) { diff --git a/x/dclgenutil/types/query.pb.go b/x/dclgenutil/types/query.pb.go index fcfdcad2a..997d5f72f 100644 --- a/x/dclgenutil/types/query.pb.go +++ b/x/dclgenutil/types/query.pb.go @@ -1,13 +1,13 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: dclgenutil/query.proto +// source: zigbeealliance/distributedcomplianceledger/dclgenutil/query.proto package types import ( context "context" fmt "fmt" - grpc1 "github.com/gogo/protobuf/grpc" - proto "github.com/gogo/protobuf/proto" + grpc1 "github.com/cosmos/gogoproto/grpc" + proto "github.com/cosmos/gogoproto/proto" _ "google.golang.org/genproto/googleapis/api/annotations" grpc "google.golang.org/grpc" math "math" @@ -24,23 +24,25 @@ var _ = math.Inf // proto package needs to be updated. 
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package -func init() { proto.RegisterFile("dclgenutil/query.proto", fileDescriptor_3af04d6515a37d3f) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/dclgenutil/query.proto", fileDescriptor_405203a00d51f2d1) +} -var fileDescriptor_3af04d6515a37d3f = []byte{ - // 195 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x4c, 0xcf, 0x3d, 0x4e, 0xc4, 0x30, - 0x10, 0x86, 0xe1, 0xa4, 0x00, 0xa4, 0x94, 0x14, 0x14, 0x08, 0xf9, 0x06, 0xb1, 0x25, 0x10, 0x17, - 0xa0, 0xa5, 0xa2, 0xa5, 0xf3, 0xcf, 0xc8, 0x8c, 0xe4, 0x78, 0x8c, 0x33, 0x96, 0x08, 0xa7, 0xe0, - 0x58, 0x94, 0x29, 0xb7, 0x5c, 0x25, 0x17, 0x59, 0xed, 0x5a, 0xab, 0xa4, 0x9d, 0xe2, 0x7b, 0x9f, - 0xe9, 0x1e, 0x9c, 0x0d, 0x1e, 0x62, 0x61, 0x0c, 0xea, 0xbb, 0x40, 0x9e, 0x64, 0xca, 0xc4, 0x74, - 0xff, 0xfa, 0x8b, 0xde, 0x00, 0xe8, 0x10, 0x50, 0x47, 0x0b, 0xd2, 0xe1, 0xc8, 0x19, 0x4d, 0x61, - 0x70, 0x96, 0x86, 0x54, 0xaf, 0x01, 0x9c, 0x87, 0x2c, 0xb7, 0x89, 0xc7, 0x27, 0x4f, 0xe4, 0x03, - 0x28, 0x9d, 0x50, 0xe9, 0x18, 0x89, 0x35, 0x23, 0xc5, 0xb1, 0x8e, 0x3e, 0xdf, 0x75, 0x37, 0x1f, - 0xe7, 0xc6, 0x1b, 0xfc, 0x2f, 0xa2, 0x9d, 0x17, 0xd1, 0x1e, 0x17, 0xd1, 0xfe, 0xad, 0xa2, 0x99, - 0x57, 0xd1, 0x1c, 0x56, 0xd1, 0x7c, 0xbe, 0x7b, 0xe4, 0xaf, 0x62, 0xa4, 0xa5, 0x41, 0x55, 0x42, - 0x7f, 0x35, 0xa8, 0x9d, 0xa1, 0xdf, 0x10, 0x7d, 0x55, 0xa8, 0x1f, 0xb5, 0x7b, 0x85, 0xa7, 0x04, - 0xa3, 0xb9, 0xbd, 0x64, 0x5f, 0x4e, 0x01, 0x00, 0x00, 0xff, 0xff, 0x5c, 0x53, 0x41, 0x67, 0xe5, - 0x00, 0x00, 0x00, +var fileDescriptor_405203a00d51f2d1 = []byte{ + // 197 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x72, 0xac, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 
0xa7, 0x16, 0xe9, 0xa7, 0x24, 0xe7, 0xa4, 0xa7, 0xe6, 0x95, 0x96, 0x64, 0xe6, 0xe8, 0x17, 0x96, + 0xa6, 0x16, 0x55, 0xea, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0x99, 0xa2, 0x1a, 0xa1, 0x87, 0xc7, + 0x08, 0x3d, 0x84, 0x11, 0x52, 0x32, 0xe9, 0xf9, 0xf9, 0xe9, 0x39, 0xa9, 0xfa, 0x89, 0x05, 0x99, + 0xfa, 0x89, 0x79, 0x79, 0xf9, 0x25, 0x89, 0x25, 0x99, 0xf9, 0x79, 0xc5, 0x10, 0x43, 0x8d, 0xd8, + 0xb9, 0x58, 0x03, 0x41, 0x76, 0x38, 0xa5, 0x9e, 0x78, 0x24, 0xc7, 0x78, 0xe1, 0x91, 0x1c, 0xe3, + 0x83, 0x47, 0x72, 0x8c, 0x13, 0x1e, 0xcb, 0x31, 0x5c, 0x78, 0x2c, 0xc7, 0x70, 0xe3, 0xb1, 0x1c, + 0x43, 0x94, 0x77, 0x7a, 0x66, 0x49, 0x46, 0x69, 0x92, 0x5e, 0x72, 0x7e, 0xae, 0x3e, 0xc4, 0x09, + 0xba, 0xd8, 0xbc, 0xa1, 0x8b, 0x70, 0x84, 0x2e, 0xd4, 0x23, 0x15, 0xc8, 0x5e, 0x29, 0xa9, 0x2c, + 0x48, 0x2d, 0x4e, 0x62, 0x03, 0x5b, 0x6b, 0x0c, 0x08, 0x00, 0x00, 0xff, 0xff, 0x06, 0x8e, 0x75, + 0x08, 0x10, 0x01, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -82,5 +84,5 @@ var _Query_serviceDesc = grpc.ServiceDesc{ HandlerType: (*QueryServer)(nil), Methods: []grpc.MethodDesc{}, Streams: []grpc.StreamDesc{}, - Metadata: "dclgenutil/query.proto", + Metadata: "zigbeealliance/distributedcomplianceledger/dclgenutil/query.proto", } diff --git a/x/dclgenutil/types/tx.pb.go b/x/dclgenutil/types/tx.pb.go index ffbf4d5cb..f0becb950 100644 --- a/x/dclgenutil/types/tx.pb.go +++ b/x/dclgenutil/types/tx.pb.go @@ -1,13 +1,13 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: dclgenutil/tx.proto +// source: zigbeealliance/distributedcomplianceledger/dclgenutil/tx.proto package types import ( context "context" fmt "fmt" - grpc1 "github.com/gogo/protobuf/grpc" - proto "github.com/gogo/protobuf/proto" + grpc1 "github.com/cosmos/gogoproto/grpc" + proto "github.com/cosmos/gogoproto/proto" grpc "google.golang.org/grpc" math "math" ) @@ -23,21 +23,23 @@ var _ = math.Inf // proto package needs to be updated. 
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package -func init() { proto.RegisterFile("dclgenutil/tx.proto", fileDescriptor_7ace9c235adb391f) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/dclgenutil/tx.proto", fileDescriptor_6fad35033dd94b88) +} -var fileDescriptor_7ace9c235adb391f = []byte{ - // 165 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x4e, 0x49, 0xce, 0x49, - 0x4f, 0xcd, 0x2b, 0x2d, 0xc9, 0xcc, 0xd1, 0x2f, 0xa9, 0xd0, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, - 0x32, 0xad, 0xca, 0x4c, 0x4f, 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, - 0x4b, 0xc9, 0x2c, 0x2e, 0x29, 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, - 0x88, 0xe6, 0xa4, 0xa6, 0xa4, 0xa7, 0x16, 0xe9, 0x21, 0xf4, 0x1b, 0xb1, 0x72, 0x31, 0xfb, 0x16, - 0xa7, 0x3b, 0xa5, 0x9e, 0x78, 0x24, 0xc7, 0x78, 0xe1, 0x91, 0x1c, 0xe3, 0x83, 0x47, 0x72, 0x8c, - 0x13, 0x1e, 0xcb, 0x31, 0x5c, 0x78, 0x2c, 0xc7, 0x70, 0xe3, 0xb1, 0x1c, 0x43, 0x94, 0x77, 0x7a, - 0x66, 0x49, 0x46, 0x69, 0x92, 0x5e, 0x72, 0x7e, 0xae, 0x3e, 0xc4, 0x0a, 0x5d, 0x98, 0x1d, 0xfa, - 0x48, 0x76, 0xe8, 0x22, 0x2c, 0xd1, 0x85, 0xd8, 0xa2, 0x5f, 0xa1, 0x8f, 0xec, 0xce, 0xca, 0x82, - 0xd4, 0xe2, 0x24, 0x36, 0xb0, 0x5b, 0x8d, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0xa2, 0x16, 0xbc, - 0x02, 0xc2, 0x00, 0x00, 0x00, +var fileDescriptor_6fad35033dd94b88 = []byte{ + // 166 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xb2, 0xab, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0xa7, 0x24, 0xe7, 0xa4, 0xa7, 0xe6, 0x95, 0x96, 0x64, 0xe6, 0xe8, 0x97, 0x54, + 0xe8, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0x99, 0xa2, 0xea, 0xd7, 0xc3, 0xa3, 0x5f, 0x0f, 
0xa1, + 0xdf, 0x88, 0x95, 0x8b, 0xd9, 0xb7, 0x38, 0xdd, 0x29, 0xf5, 0xc4, 0x23, 0x39, 0xc6, 0x0b, 0x8f, + 0xe4, 0x18, 0x1f, 0x3c, 0x92, 0x63, 0x9c, 0xf0, 0x58, 0x8e, 0xe1, 0xc2, 0x63, 0x39, 0x86, 0x1b, + 0x8f, 0xe5, 0x18, 0xa2, 0xbc, 0xd3, 0x33, 0x4b, 0x32, 0x4a, 0x93, 0xf4, 0x92, 0xf3, 0x73, 0xf5, + 0x21, 0x56, 0xe8, 0x62, 0x73, 0xa3, 0x2e, 0xc2, 0x12, 0x5d, 0xa8, 0x2b, 0x2b, 0x50, 0xdc, 0x59, + 0x59, 0x90, 0x5a, 0x9c, 0xc4, 0x06, 0x76, 0xab, 0x31, 0x20, 0x00, 0x00, 0xff, 0xff, 0x58, 0x47, + 0xcc, 0xd0, 0xed, 0x00, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -79,5 +81,5 @@ var _Msg_serviceDesc = grpc.ServiceDesc{ HandlerType: (*MsgServer)(nil), Methods: []grpc.MethodDesc{}, Streams: []grpc.StreamDesc{}, - Metadata: "dclgenutil/tx.proto", + Metadata: "zigbeealliance/distributedcomplianceledger/dclgenutil/tx.proto", } diff --git a/x/dclgenutil/utils_test.go b/x/dclgenutil/utils_test.go index 93e39b7be..2fa75d1ea 100644 --- a/x/dclgenutil/utils_test.go +++ b/x/dclgenutil/utils_test.go @@ -10,9 +10,9 @@ import ( "time" "github.com/stretchr/testify/require" - "github.com/tendermint/tendermint/config" - tmed25519 "github.com/tendermint/tendermint/crypto/ed25519" - "github.com/tendermint/tendermint/privval" + "github.com/cometbft/cometbft/config" + tmed25519 "github.com/cometbft/cometbft/crypto/ed25519" + "github.com/cometbft/cometbft/privval" ) func TestExportGenesisFileWithTime(t *testing.T) { diff --git a/x/dclupgrade/client/cli/query.go b/x/dclupgrade/client/cli/query.go index 8a6c9868f..e656eb180 100644 --- a/x/dclupgrade/client/cli/query.go +++ b/x/dclupgrade/client/cli/query.go @@ -14,7 +14,7 @@ import ( ) // GetQueryCmd returns the cli query commands for this module. 
-func GetQueryCmd(queryRoute string) *cobra.Command { +func GetQueryCmd(_ string) *cobra.Command { // Group dclupgrade queries under a subcommand cmd := &cobra.Command{ Use: types.ModuleName, diff --git a/x/dclupgrade/client/cli/query_approved_upgrade.go b/x/dclupgrade/client/cli/query_approved_upgrade.go index 32a40623e..5bf70341a 100644 --- a/x/dclupgrade/client/cli/query_approved_upgrade.go +++ b/x/dclupgrade/client/cli/query_approved_upgrade.go @@ -57,7 +57,10 @@ func CmdShowApprovedUpgrade() *cobra.Command { Short: "Query approved upgrade by name", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } var res types.ApprovedUpgrade diff --git a/x/dclupgrade/client/cli/query_approved_upgrade_test.go b/x/dclupgrade/client/cli/query_approved_upgrade_test.go index 3684dc089..eee85e884 100644 --- a/x/dclupgrade/client/cli/query_approved_upgrade_test.go +++ b/x/dclupgrade/client/cli/query_approved_upgrade_test.go @@ -9,7 +9,7 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/nullify" diff --git a/x/dclupgrade/client/cli/query_proposed_upgrade.go b/x/dclupgrade/client/cli/query_proposed_upgrade.go index d23979e83..38c71c1f5 100644 --- a/x/dclupgrade/client/cli/query_proposed_upgrade.go +++ b/x/dclupgrade/client/cli/query_proposed_upgrade.go @@ -57,7 +57,10 @@ func CmdShowProposedUpgrade() *cobra.Command { Short: "Query proposed upgrade by name", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := 
client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } var res types.ProposedUpgrade diff --git a/x/dclupgrade/client/cli/query_proposed_upgrade_test.go b/x/dclupgrade/client/cli/query_proposed_upgrade_test.go index 522060b6c..a4880ff78 100644 --- a/x/dclupgrade/client/cli/query_proposed_upgrade_test.go +++ b/x/dclupgrade/client/cli/query_proposed_upgrade_test.go @@ -9,7 +9,7 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/nullify" diff --git a/x/dclupgrade/client/cli/query_rejected_upgrade.go b/x/dclupgrade/client/cli/query_rejected_upgrade.go index 88af31ba8..a51efd93a 100644 --- a/x/dclupgrade/client/cli/query_rejected_upgrade.go +++ b/x/dclupgrade/client/cli/query_rejected_upgrade.go @@ -15,7 +15,10 @@ func CmdListRejectedUpgrade() *cobra.Command { Use: "all-rejected-upgrades", Short: "Query the list of all rejected upgrades", RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } pageReq, err := client.ReadPageRequest(cmd.Flags()) if err != nil { @@ -51,7 +54,10 @@ func CmdShowRejectedUpgrade() *cobra.Command { Short: "Query rejected upgrade by name", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } var res types.RejectedUpgrade diff --git a/x/dclupgrade/client/cli/query_rejected_upgrade_test.go 
b/x/dclupgrade/client/cli/query_rejected_upgrade_test.go index a273a8f80..ad758984a 100644 --- a/x/dclupgrade/client/cli/query_rejected_upgrade_test.go +++ b/x/dclupgrade/client/cli/query_rejected_upgrade_test.go @@ -9,7 +9,7 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" diff --git a/x/dclupgrade/handler.go b/x/dclupgrade/handler.go index 679c4efbb..fd5b62674 100644 --- a/x/dclupgrade/handler.go +++ b/x/dclupgrade/handler.go @@ -3,6 +3,7 @@ package dclupgrade import ( "fmt" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclupgrade/keeper" @@ -33,7 +34,7 @@ func NewHandler(k keeper.Keeper) sdk.Handler { default: errMsg := fmt.Sprintf("unrecognized %s message type: %T", types.ModuleName, msg) - return nil, sdkerrors.Wrap(sdkerrors.ErrUnknownRequest, errMsg) + return nil, errors.Wrap(sdkerrors.ErrUnknownRequest, errMsg) } } } diff --git a/x/dclupgrade/keeper/keeper.go b/x/dclupgrade/keeper/keeper.go index 85d0371c8..589aa4ad8 100644 --- a/x/dclupgrade/keeper/keeper.go +++ b/x/dclupgrade/keeper/keeper.go @@ -4,9 +4,10 @@ import ( "fmt" "math" - "github.com/tendermint/tendermint/libs/log" + "github.com/cometbft/cometbft/libs/log" "github.com/cosmos/cosmos-sdk/codec" + storetypes "github.com/cosmos/cosmos-sdk/store/types" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclupgrade/types" ) @@ -14,8 +15,8 @@ import ( type ( Keeper struct { cdc codec.BinaryCodec - storeKey sdk.StoreKey - memKey sdk.StoreKey + storeKey storetypes.StoreKey + memKey storetypes.StoreKey dclauthKeeper types.DclauthKeeper upgradeKeeper 
types.UpgradeKeeper @@ -25,7 +26,7 @@ type ( func NewKeeper( cdc codec.BinaryCodec, storeKey, - memKey sdk.StoreKey, + memKey storetypes.StoreKey, dclauthKeeper types.DclauthKeeper, upgradeKeeper types.UpgradeKeeper, diff --git a/x/dclupgrade/keeper/msg_server_approve_upgrade.go b/x/dclupgrade/keeper/msg_server_approve_upgrade.go index 14d74c997..74ea5f618 100644 --- a/x/dclupgrade/keeper/msg_server_approve_upgrade.go +++ b/x/dclupgrade/keeper/msg_server_approve_upgrade.go @@ -3,6 +3,7 @@ package keeper import ( "context" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclupgrade/types" @@ -13,12 +14,12 @@ func (k msgServer) ApproveUpgrade(goCtx context.Context, msg *types.MsgApproveUp creatorAddr, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } // check if message creator has enough rights to approve upgrade if !k.dclauthKeeper.HasRole(ctx, creatorAddr, types.UpgradeApprovalRole) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "MsgApproveUpgrade transaction should be signed by an account with the %s role", types.UpgradeApprovalRole, ) @@ -32,7 +33,7 @@ func (k msgServer) ApproveUpgrade(goCtx context.Context, msg *types.MsgApproveUp // check if proposed upgrade already has approval form message creator if proposedUpgrade.HasApprovalFrom(creatorAddr) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "Proposed upgrade with name=%v already has approval from=%v", msg.Name, msg.Creator, ) diff --git a/x/dclupgrade/keeper/msg_server_propose_upgrade.go b/x/dclupgrade/keeper/msg_server_propose_upgrade.go index 65d7ff58a..64cee40b2 
100644 --- a/x/dclupgrade/keeper/msg_server_propose_upgrade.go +++ b/x/dclupgrade/keeper/msg_server_propose_upgrade.go @@ -3,6 +3,7 @@ package keeper import ( "context" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclupgrade/types" @@ -13,12 +14,12 @@ func (k msgServer) ProposeUpgrade(goCtx context.Context, msg *types.MsgProposeUp creatorAddr, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } // check if message creator has enough rights to propose upgrade if !k.dclauthKeeper.HasRole(ctx, creatorAddr, types.UpgradeApprovalRole) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "MsgProposeUpgrade transaction should be signed by an account with the %s role", types.UpgradeApprovalRole, ) diff --git a/x/dclupgrade/keeper/msg_server_reject_upgrade.go b/x/dclupgrade/keeper/msg_server_reject_upgrade.go index 3bfda0bad..b67d36b24 100644 --- a/x/dclupgrade/keeper/msg_server_reject_upgrade.go +++ b/x/dclupgrade/keeper/msg_server_reject_upgrade.go @@ -3,6 +3,7 @@ package keeper import ( "context" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclupgrade/types" @@ -13,12 +14,12 @@ func (k msgServer) RejectUpgrade(goCtx context.Context, msg *types.MsgRejectUpgr creatorAddr, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } // check if message creator has enough rights to reject upgrade 
if !k.dclauthKeeper.HasRole(ctx, creatorAddr, types.UpgradeApprovalRole) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "MsgApproveUpgrade transaction should be signed by an account with the %s role", types.UpgradeApprovalRole, ) @@ -32,7 +33,7 @@ func (k msgServer) RejectUpgrade(goCtx context.Context, msg *types.MsgRejectUpgr // check if proposed upgrade already has reject from message creator if proposedUpgrade.HasRejectFrom(creatorAddr) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "Proposed upgrade with name=%v already has reject from=%v", msg.Name, msg.Creator, ) diff --git a/x/dclupgrade/keeper/msg_server_test.go b/x/dclupgrade/keeper/msg_server_test.go index ed6f922aa..e7fded3f4 100644 --- a/x/dclupgrade/keeper/msg_server_test.go +++ b/x/dclupgrade/keeper/msg_server_test.go @@ -10,7 +10,7 @@ import ( "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclupgrade/types" ) -//nolint:deadcode,unused +//nolint:unused func setupMsgServer(tb testing.TB) (types.MsgServer, context.Context) { tb.Helper() k, ctx := keepertest.DclupgradeKeeper(tb, nil, nil) diff --git a/x/dclupgrade/module.go b/x/dclupgrade/module.go index 0d90236f3..a5eb8811e 100644 --- a/x/dclupgrade/module.go +++ b/x/dclupgrade/module.go @@ -5,17 +5,18 @@ import ( "encoding/json" "fmt" + abci "github.com/cometbft/cometbft/abci/types" "github.com/gorilla/mux" "github.com/grpc-ecosystem/grpc-gateway/runtime" "github.com/spf13/cobra" - abci "github.com/tendermint/tendermint/abci/types" - + "cosmossdk.io/core/appmodule" "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/codec" cdctypes "github.com/cosmos/cosmos-sdk/codec/types" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/types/module" + "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclupgrade/client/cli" 
"github.com/zigbee-alliance/distributed-compliance-ledger/x/dclupgrade/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclupgrade/types" @@ -63,7 +64,7 @@ func (AppModuleBasic) DefaultGenesis(cdc codec.JSONCodec) json.RawMessage { } // ValidateGenesis performs genesis state validation for the capability module. -func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, config client.TxEncodingConfig, bz json.RawMessage) error { +func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, _ client.TxEncodingConfig, bz json.RawMessage) error { var genState types.GenesisState if err := cdc.UnmarshalJSON(bz, &genState); err != nil { return fmt.Errorf("failed to unmarshal %s genesis state: %w", types.ModuleName, err) @@ -73,7 +74,7 @@ func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, config client.TxEncod } // RegisterRESTRoutes registers the capability module's REST service handlers. -func (AppModuleBasic) RegisterRESTRoutes(clientCtx client.Context, rtr *mux.Router) { +func (AppModuleBasic) RegisterRESTRoutes(_ client.Context, _ *mux.Router) { } // RegisterGRPCGatewayRoutes registers the gRPC Gateway routes for the module. @@ -109,23 +110,16 @@ func NewAppModule(cdc codec.Codec, keeper keeper.Keeper) AppModule { } } +var _ appmodule.AppModule = AppModule{} + // Name returns the capability module's name. func (am AppModule) Name() string { return am.AppModuleBasic.Name() } -// Route returns the capability module's message routing key. -func (am AppModule) Route() sdk.Route { - return sdk.NewRoute(types.RouterKey, NewHandler(am.keeper)) -} - -// QuerierRoute returns the capability module's query routing key. -func (AppModule) QuerierRoute() string { return types.QuerierRoute } +func (am AppModule) IsOnePerModuleType() {} -// LegacyQuerierHandler returns the capability module's Querier. 
-func (am AppModule) LegacyQuerierHandler(legacyQuerierCdc *codec.LegacyAmino) sdk.Querier { - return nil -} +func (am AppModule) IsAppModule() {} // RegisterServices registers a GRPC query service to respond to the // module-specific GRPC queries. diff --git a/x/dclupgrade/module_simulation.go b/x/dclupgrade/module_simulation.go index 325facc53..d15d1c771 100644 --- a/x/dclupgrade/module_simulation.go +++ b/x/dclupgrade/module_simulation.go @@ -4,7 +4,6 @@ import ( "math/rand" "github.com/cosmos/cosmos-sdk/baseapp" - simappparams "github.com/cosmos/cosmos-sdk/simapp/params" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/types/module" simtypes "github.com/cosmos/cosmos-sdk/types/simulation" @@ -18,7 +17,6 @@ import ( var ( _ = sample.AccAddress _ = dclupgradesimulation.FindAccount - _ = simappparams.StakePerAccount _ = simulation.MsgEntryKind _ = baseapp.Paramspace ) @@ -52,15 +50,10 @@ func (AppModule) GenerateGenesisState(simState *module.SimulationState) { } // ProposalContents doesn't return any content functions for governance proposals. -func (AppModule) ProposalContents(_ module.SimulationState) []simtypes.WeightedProposalContent { +func (AppModule) ProposalContents(_ module.SimulationState) []simtypes.WeightedProposalContent { //nolint:staticcheck return nil } -// RandomizedParams creates randomized param changes for the simulator. -func (am AppModule) RandomizedParams(_ *rand.Rand) []simtypes.ParamChange { - return []simtypes.ParamChange{} -} - // RegisterStoreDecoder registers a decoder. 
func (am AppModule) RegisterStoreDecoder(_ sdk.StoreDecoderRegistry) {} diff --git a/x/dclupgrade/simulation/approve_upgrade.go b/x/dclupgrade/simulation/approve_upgrade.go index d7f711448..4668b7472 100644 --- a/x/dclupgrade/simulation/approve_upgrade.go +++ b/x/dclupgrade/simulation/approve_upgrade.go @@ -10,7 +10,7 @@ import ( "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclupgrade/types" ) -func SimulateMsgApproveUpgrade(k keeper.Keeper) simtypes.Operation { +func SimulateMsgApproveUpgrade(_ keeper.Keeper) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { simAccount, _ := simtypes.RandomAcc(r, accs) diff --git a/x/dclupgrade/simulation/propose_upgrade.go b/x/dclupgrade/simulation/propose_upgrade.go index ba6342911..bc05fab0d 100644 --- a/x/dclupgrade/simulation/propose_upgrade.go +++ b/x/dclupgrade/simulation/propose_upgrade.go @@ -10,7 +10,7 @@ import ( "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclupgrade/types" ) -func SimulateMsgProposeUpgrade(k keeper.Keeper) simtypes.Operation { +func SimulateMsgProposeUpgrade(_ keeper.Keeper) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { simAccount, _ := simtypes.RandomAcc(r, accs) diff --git a/x/dclupgrade/simulation/reject_upgrade.go b/x/dclupgrade/simulation/reject_upgrade.go index 5c047f28c..6c87f92b9 100644 --- a/x/dclupgrade/simulation/reject_upgrade.go +++ b/x/dclupgrade/simulation/reject_upgrade.go @@ -11,7 +11,7 @@ import ( ) func SimulateMsgRejectUpgrade( - k keeper.Keeper, + _ keeper.Keeper, ) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { diff --git 
a/x/dclupgrade/types/approved_upgrade.pb.go b/x/dclupgrade/types/approved_upgrade.pb.go index b965ed2f9..a987bd2c4 100644 --- a/x/dclupgrade/types/approved_upgrade.pb.go +++ b/x/dclupgrade/types/approved_upgrade.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: dclupgrade/approved_upgrade.proto +// source: zigbeealliance/distributedcomplianceledger/dclupgrade/approved_upgrade.proto package types @@ -7,8 +7,8 @@ import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" types "github.com/cosmos/cosmos-sdk/x/upgrade/types" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -36,7 +36,7 @@ func (m *ApprovedUpgrade) Reset() { *m = ApprovedUpgrade{} } func (m *ApprovedUpgrade) String() string { return proto.CompactTextString(m) } func (*ApprovedUpgrade) ProtoMessage() {} func (*ApprovedUpgrade) Descriptor() ([]byte, []int) { - return fileDescriptor_1eadd88919c82ffc, []int{0} + return fileDescriptor_fb4c43c47cec3bb3, []int{0} } func (m *ApprovedUpgrade) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -97,33 +97,35 @@ func init() { proto.RegisterType((*ApprovedUpgrade)(nil), "zigbeealliance.distributedcomplianceledger.dclupgrade.ApprovedUpgrade") } -func init() { proto.RegisterFile("dclupgrade/approved_upgrade.proto", fileDescriptor_1eadd88919c82ffc) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/dclupgrade/approved_upgrade.proto", fileDescriptor_fb4c43c47cec3bb3) +} -var fileDescriptor_1eadd88919c82ffc = []byte{ - // 354 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x91, 0xbd, 0x4e, 0x2a, 0x41, - 0x14, 0x80, 0x77, 0x81, 0x5c, 0xc2, 0x52, 0xdc, 0x64, 0x43, 0x6e, 0xf6, 0x92, 0x9b, 0xbd, 0x68, - 0x2c, 0x68, 0x76, 0x26, 0x60, 0xb4, 0xb2, 0x81, 0xc6, 0xc2, 0xc6, 0x60, 0xb4, 0xa0, 
0x21, 0xb3, - 0x33, 0x27, 0xe3, 0x9a, 0x61, 0x67, 0x32, 0x33, 0x10, 0xf5, 0x29, 0x7c, 0x15, 0x13, 0x1f, 0x82, - 0x92, 0x58, 0x59, 0x19, 0x03, 0x2f, 0x62, 0x60, 0x76, 0x03, 0xb6, 0xc6, 0x6e, 0xe7, 0x9c, 0xb3, - 0xdf, 0x77, 0x7e, 0x82, 0x03, 0x46, 0xc5, 0x4c, 0x71, 0x4d, 0x18, 0x60, 0xa2, 0x94, 0x96, 0x73, - 0x60, 0x93, 0x22, 0x80, 0x94, 0x96, 0x56, 0x86, 0x27, 0x8f, 0x19, 0x4f, 0x01, 0x88, 0x10, 0x19, - 0xc9, 0x29, 0x20, 0x96, 0x19, 0xab, 0xb3, 0x74, 0x66, 0x81, 0x51, 0x39, 0x55, 0x2e, 0x2a, 0x80, - 0x71, 0xd0, 0x68, 0x47, 0x6b, 0xb7, 0xb8, 0xe4, 0x72, 0x4b, 0xc0, 0x9b, 0x2f, 0x07, 0x6b, 0xff, - 0xa5, 0xd2, 0x4c, 0xa5, 0x99, 0xb8, 0x84, 0x7b, 0x14, 0xa9, 0x23, 0xf7, 0xc2, 0x65, 0x3b, 0xf3, - 0x5e, 0x0a, 0x96, 0xf4, 0xf0, 0x97, 0x6e, 0xda, 0x7f, 0xf6, 0x1a, 0xe6, 0x9a, 0xe4, 0xd6, 0xc5, - 0x0f, 0x9f, 0x2b, 0xc1, 0xef, 0x41, 0x31, 0xc0, 0xb5, 0xcb, 0x87, 0xa7, 0x41, 0x4d, 0x09, 0x92, - 0x47, 0x7e, 0xc7, 0xef, 0x36, 0xfb, 0xff, 0x50, 0xa1, 0x2b, 0x81, 0x85, 0x00, 0x5d, 0x0a, 0x92, - 0x0f, 0x6b, 0x8b, 0xf7, 0xff, 0xde, 0x68, 0x5b, 0x1f, 0xf6, 0x83, 0x3a, 0xd5, 0x40, 0xac, 0xd4, - 0x51, 0xa5, 0xe3, 0x77, 0x1b, 0xc3, 0xe8, 0xf5, 0x25, 0x69, 0x15, 0x7f, 0x0f, 0x18, 0xd3, 0x60, - 0xcc, 0x95, 0xd5, 0x59, 0xce, 0x47, 0x65, 0x61, 0x38, 0x0e, 0x1a, 0x6e, 0x7f, 0x44, 0x98, 0xa8, - 0xda, 0xa9, 0x76, 0x9b, 0xfd, 0x33, 0xf4, 0xad, 0xcd, 0xa1, 0xf3, 0xcd, 0x58, 0xa3, 0x1d, 0x2e, - 0xbc, 0x09, 0xea, 0x1a, 0xee, 0x80, 0x5a, 0x13, 0xd5, 0x7e, 0x80, 0x5c, 0xc2, 0x86, 0xb0, 0x58, - 0xc5, 0xfe, 0x72, 0x15, 0xfb, 0x1f, 0xab, 0xd8, 0x7f, 0x5a, 0xc7, 0xde, 0x72, 0x1d, 0x7b, 0x6f, - 0xeb, 0xd8, 0x1b, 0x5f, 0xf0, 0xcc, 0xde, 0xce, 0x52, 0x44, 0xe5, 0x14, 0x3b, 0x55, 0x52, 0xba, - 0xf0, 0x9e, 0x2b, 0xd9, 0xc9, 0x12, 0x67, 0xc3, 0xf7, 0x78, 0xef, 0x40, 0xf6, 0x41, 0x81, 0x49, - 0x7f, 0x6d, 0x2f, 0x74, 0xfc, 0x19, 0x00, 0x00, 0xff, 0xff, 0x4e, 0x3c, 0xba, 0xcf, 0x6c, 0x02, - 0x00, 0x00, +var fileDescriptor_fb4c43c47cec3bb3 = []byte{ + // 359 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 
0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x91, 0xbf, 0x4e, 0x2a, 0x41, + 0x14, 0xc6, 0x77, 0x81, 0x5c, 0xc2, 0x52, 0xdc, 0x64, 0x43, 0xb1, 0x12, 0xb3, 0x12, 0x63, 0x41, + 0xb3, 0xb3, 0x01, 0xa3, 0x95, 0x0d, 0x34, 0x16, 0x5a, 0x18, 0x8c, 0x16, 0x34, 0x64, 0x76, 0xe7, + 0x64, 0x5c, 0x33, 0xec, 0x4c, 0x66, 0x06, 0xa2, 0x3e, 0x85, 0xaf, 0x62, 0xe2, 0x43, 0x50, 0x12, + 0x2b, 0x2b, 0x63, 0xe0, 0x45, 0x0c, 0xcc, 0x6c, 0x90, 0xc4, 0x58, 0x10, 0xbb, 0x39, 0x7f, 0xe6, + 0x77, 0xbe, 0x73, 0x3e, 0xef, 0xf2, 0x29, 0xa3, 0x09, 0x00, 0x66, 0x2c, 0xc3, 0x79, 0x0a, 0x31, + 0xc9, 0x94, 0x96, 0x59, 0x32, 0xd1, 0x40, 0x52, 0x3e, 0x16, 0x26, 0xcb, 0x80, 0x50, 0x90, 0x31, + 0x49, 0xd9, 0x44, 0x50, 0x89, 0x09, 0xc4, 0x58, 0x08, 0xc9, 0xa7, 0x40, 0x46, 0x36, 0x81, 0x84, + 0xe4, 0x9a, 0xfb, 0x27, 0xdb, 0x34, 0xf4, 0x0b, 0x0d, 0x6d, 0x68, 0xcd, 0x06, 0xe5, 0x94, 0xaf, + 0x09, 0xf1, 0xea, 0x65, 0x60, 0xcd, 0xbd, 0x94, 0xab, 0x31, 0x57, 0x23, 0x53, 0x30, 0x81, 0x2d, + 0x1d, 0x99, 0x28, 0x2e, 0xe4, 0x4c, 0x3b, 0x09, 0x68, 0xdc, 0x89, 0xb7, 0xd4, 0x34, 0x7b, 0xbb, + 0xed, 0x46, 0x25, 0xce, 0xb5, 0x41, 0x1c, 0xbe, 0x94, 0xbc, 0xff, 0x3d, 0xbb, 0xeb, 0x8d, 0xa9, + 0xfb, 0xa7, 0x5e, 0x45, 0x30, 0x9c, 0x07, 0x6e, 0xcb, 0x6d, 0xd7, 0xbb, 0xfb, 0xc8, 0x2a, 0x2b, + 0x66, 0x5b, 0x2d, 0xe8, 0x8a, 0xe1, 0xbc, 0x5f, 0x99, 0x7d, 0x1c, 0x38, 0x83, 0x75, 0xbf, 0xdf, + 0xf5, 0xaa, 0xa9, 0x04, 0xac, 0xb9, 0x0c, 0x4a, 0x2d, 0xb7, 0x5d, 0xeb, 0x07, 0x6f, 0xaf, 0x51, + 0xc3, 0xfe, 0xee, 0x11, 0x22, 0x41, 0xa9, 0x6b, 0x2d, 0xb3, 0x9c, 0x0e, 0x8a, 0x46, 0x7f, 0xe8, + 0xd5, 0xcc, 0xa9, 0x31, 0x53, 0x41, 0xb9, 0x55, 0x6e, 0xd7, 0xbb, 0x67, 0x68, 0xa7, 0x23, 0xa3, + 0xf3, 0xd5, 0x5a, 0x83, 0x0d, 0xce, 0xbf, 0xf5, 0xaa, 0x12, 0xee, 0x21, 0xd5, 0x2a, 0xa8, 0xfc, + 0x01, 0xb9, 0x80, 0xf5, 0x61, 0xb6, 0x08, 0xdd, 0xf9, 0x22, 0x74, 0x3f, 0x17, 0xa1, 0xfb, 0xbc, + 0x0c, 0x9d, 0xf9, 0x32, 0x74, 0xde, 0x97, 0xa1, 0x33, 0xbc, 0xa0, 0x99, 0xbe, 0x9b, 0x24, 0x28, + 0xe5, 0xe3, 0xd8, 0x8c, 0x8a, 0x7e, 0x32, 0x27, 
0xda, 0x0c, 0x8b, 0xac, 0x3d, 0x0f, 0xdf, 0x0d, + 0xd2, 0x8f, 0x02, 0x54, 0xf2, 0x6f, 0xed, 0xd0, 0xf1, 0x57, 0x00, 0x00, 0x00, 0xff, 0xff, 0xa3, + 0x50, 0xeb, 0x45, 0xc2, 0x02, 0x00, 0x00, } func (m *ApprovedUpgrade) Marshal() (dAtA []byte, err error) { diff --git a/x/dclupgrade/types/codec.go b/x/dclupgrade/types/codec.go index 7dbb4c3ca..9c3777159 100644 --- a/x/dclupgrade/types/codec.go +++ b/x/dclupgrade/types/codec.go @@ -26,7 +26,7 @@ func RegisterInterfaces(registry cdctypes.InterfaceRegistry) { ) // this line is used by starport scaffolding # 3 - msgservice.RegisterMsgServiceDesc(registry, &_Msg_serviceDesc) + msgservice.RegisterMsgServiceDesc(registry, &_Msg_serviceDesc) //nolint:nosnakecase } var ( diff --git a/x/dclupgrade/types/errors.go b/x/dclupgrade/types/errors.go index 43f0cadb0..46e16e08c 100644 --- a/x/dclupgrade/types/errors.go +++ b/x/dclupgrade/types/errors.go @@ -3,18 +3,18 @@ package types // DONTCOVER import ( - sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "cosmossdk.io/errors" ) // x/dclupgrade module sentinel errors. 
var ( - ErrProposedUpgradeAlreadyExists = sdkerrors.Register(ModuleName, 801, "proposed upgrade already exists") - ErrProposedUpgradeDoesNotExist = sdkerrors.Register(ModuleName, 802, "proposed upgrade does not exist") - ErrApprovedUpgradeAlreadyExists = sdkerrors.Register(ModuleName, 803, "approved upgrade already exists") + ErrProposedUpgradeAlreadyExists = errors.Register(ModuleName, 801, "proposed upgrade already exists") + ErrProposedUpgradeDoesNotExist = errors.Register(ModuleName, 802, "proposed upgrade does not exist") + ErrApprovedUpgradeAlreadyExists = errors.Register(ModuleName, 803, "approved upgrade already exists") ) func NewErrProposedUpgradeAlreadyExists(name interface{}) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrProposedUpgradeAlreadyExists, "Proposed upgrade with name=%v already exists on the ledger", name, @@ -22,7 +22,7 @@ func NewErrProposedUpgradeAlreadyExists(name interface{}) error { } func NewErrProposedUpgradeDoesNotExist(name interface{}) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrProposedUpgradeDoesNotExist, "Proposed upgrade with name=%v does not exist on the ledger", name, @@ -30,7 +30,7 @@ func NewErrProposedUpgradeDoesNotExist(name interface{}) error { } func NewErrApprovedUpgradeAlreadyExists(name interface{}) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrApprovedUpgradeAlreadyExists, "Approved upgrade with name=%v already exists on the ledger", name, diff --git a/x/dclupgrade/types/genesis.pb.go b/x/dclupgrade/types/genesis.pb.go index bfe248487..58dc451bb 100644 --- a/x/dclupgrade/types/genesis.pb.go +++ b/x/dclupgrade/types/genesis.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: dclupgrade/genesis.proto +// source: zigbeealliance/distributedcomplianceledger/dclupgrade/genesis.proto package types import ( fmt "fmt" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -34,7 +34,7 @@ func (m *GenesisState) Reset() { *m = GenesisState{} } func (m *GenesisState) String() string { return proto.CompactTextString(m) } func (*GenesisState) ProtoMessage() {} func (*GenesisState) Descriptor() ([]byte, []int) { - return fileDescriptor_322f03871f60c416, []int{0} + return fileDescriptor_363cdb039e937086, []int{0} } func (m *GenesisState) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -88,29 +88,31 @@ func init() { proto.RegisterType((*GenesisState)(nil), "zigbeealliance.distributedcomplianceledger.dclupgrade.GenesisState") } -func init() { proto.RegisterFile("dclupgrade/genesis.proto", fileDescriptor_322f03871f60c416) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/dclupgrade/genesis.proto", fileDescriptor_363cdb039e937086) +} -var fileDescriptor_322f03871f60c416 = []byte{ - // 299 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x48, 0x49, 0xce, 0x29, - 0x2d, 0x48, 0x2f, 0x4a, 0x4c, 0x49, 0xd5, 0x4f, 0x4f, 0xcd, 0x4b, 0x2d, 0xce, 0x2c, 0xd6, 0x2b, - 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x32, 0xad, 0xca, 0x4c, 0x4f, 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, - 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4b, 0xc9, 0x2c, 0x2e, 0x29, 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, - 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, 0xa7, 0x16, 0xe9, 0x21, 0x0c, 0x91, - 0x12, 0x49, 0xcf, 0x4f, 0xcf, 0x07, 0x9b, 0xa0, 0x0f, 0x62, 0x41, 0x0c, 0x93, 0x52, 0x44, 0xb2, - 0xa6, 0xa0, 0x28, 0xbf, 0x20, 0xbf, 0x38, 0x35, 0x25, 0x1e, 0x2a, 0x80, 0x45, 0x49, 0x62, 0x41, - 0x41, 0x51, 0x7e, 0x19, 0x5e, 0x25, 0x45, 
0xa9, 0x59, 0xa9, 0xc9, 0x25, 0xe8, 0x4a, 0x94, 0xfa, - 0x98, 0xb9, 0x78, 0xdc, 0x21, 0xfe, 0x08, 0x2e, 0x49, 0x2c, 0x49, 0x15, 0xaa, 0xe3, 0x12, 0x86, - 0x59, 0x18, 0x0a, 0x51, 0xe9, 0x93, 0x59, 0x5c, 0x22, 0xc1, 0xa8, 0xc0, 0xac, 0xc1, 0x6d, 0xe4, - 0xa6, 0x47, 0x96, 0x27, 0xf5, 0x02, 0x50, 0x4d, 0x74, 0x62, 0x39, 0x71, 0x4f, 0x9e, 0x21, 0x08, - 0x9b, 0x45, 0x20, 0xfb, 0x61, 0xbe, 0x41, 0xb6, 0x9f, 0x89, 0x22, 0xfb, 0x1d, 0x51, 0x4d, 0x84, - 0xd9, 0x8f, 0xc5, 0x22, 0x90, 0xfd, 0xb0, 0xa0, 0x42, 0xb6, 0x9f, 0x99, 0x22, 0xfb, 0x83, 0x50, - 0x4d, 0x84, 0xd9, 0x8f, 0xc5, 0x22, 0xa7, 0xd4, 0x13, 0x8f, 0xe4, 0x18, 0x2f, 0x3c, 0x92, 0x63, - 0x7c, 0xf0, 0x48, 0x8e, 0x71, 0xc2, 0x63, 0x39, 0x86, 0x0b, 0x8f, 0xe5, 0x18, 0x6e, 0x3c, 0x96, - 0x63, 0x88, 0xf2, 0x4e, 0xcf, 0x2c, 0xc9, 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x87, 0x38, - 0x43, 0x17, 0xe6, 0x0e, 0x7d, 0x24, 0x77, 0xe8, 0x22, 0x1c, 0xa2, 0x0b, 0x71, 0x89, 0x7e, 0x85, - 0x3e, 0x52, 0x42, 0x28, 0xa9, 0x2c, 0x48, 0x2d, 0x4e, 0x62, 0x03, 0x47, 0xbf, 0x31, 0x20, 0x00, - 0x00, 0xff, 0xff, 0x13, 0x39, 0x47, 0xf1, 0xd0, 0x02, 0x00, 0x00, +var fileDescriptor_363cdb039e937086 = []byte{ + // 303 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x72, 0xae, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0xa7, 0x24, 0xe7, 0x94, 0x16, 0xa4, 0x17, 0x25, 0xa6, 0xa4, 0xea, 0xa7, 0xa7, + 0xe6, 0xa5, 0x16, 0x67, 0x16, 0xeb, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0x99, 0xa2, 0x1a, 0xa2, + 0x87, 0xc7, 0x10, 0x3d, 0x84, 0x21, 0x52, 0x22, 0xe9, 0xf9, 0xe9, 0xf9, 0x60, 0x13, 0xf4, 0x41, + 0x2c, 0x88, 0x61, 0x52, 0x3e, 0xe4, 0xb9, 0xa8, 0xa0, 0x28, 0xbf, 0x20, 0xbf, 0x38, 0x35, 0x25, + 0x1e, 0x2a, 0x40, 0x99, 0x69, 0x89, 0x05, 0x05, 0x45, 0xf9, 0x65, 0xd4, 0x32, 0xad, 0x28, 0x35, + 0x2b, 0x35, 
0xb9, 0x04, 0xdd, 0x34, 0xa5, 0x3e, 0x66, 0x2e, 0x1e, 0x77, 0x48, 0x40, 0x06, 0x97, + 0x24, 0x96, 0xa4, 0x0a, 0xd5, 0x71, 0x09, 0xc3, 0xbc, 0x11, 0x0a, 0x51, 0xe9, 0x93, 0x59, 0x5c, + 0x22, 0xc1, 0xa8, 0xc0, 0xac, 0xc1, 0x6d, 0xe4, 0xa6, 0x47, 0x56, 0x28, 0xeb, 0x05, 0xa0, 0x9a, + 0xe8, 0xc4, 0x72, 0xe2, 0x9e, 0x3c, 0x43, 0x10, 0x36, 0x8b, 0x40, 0xf6, 0xc3, 0x3c, 0x8e, 0x6c, + 0x3f, 0x13, 0x45, 0xf6, 0x3b, 0xa2, 0x9a, 0x08, 0xb3, 0x1f, 0x8b, 0x45, 0x20, 0xfb, 0x61, 0x41, + 0x85, 0x6c, 0x3f, 0x33, 0x45, 0xf6, 0x07, 0xa1, 0x9a, 0x08, 0xb3, 0x1f, 0x8b, 0x45, 0x4e, 0xa9, + 0x27, 0x1e, 0xc9, 0x31, 0x5e, 0x78, 0x24, 0xc7, 0xf8, 0xe0, 0x91, 0x1c, 0xe3, 0x84, 0xc7, 0x72, + 0x0c, 0x17, 0x1e, 0xcb, 0x31, 0xdc, 0x78, 0x2c, 0xc7, 0x10, 0xe5, 0x9d, 0x9e, 0x59, 0x92, 0x51, + 0x9a, 0xa4, 0x97, 0x9c, 0x9f, 0xab, 0x0f, 0x71, 0x86, 0x2e, 0xb6, 0x44, 0xa0, 0x8b, 0x70, 0x88, + 0x2e, 0x34, 0x19, 0x54, 0x20, 0x27, 0x84, 0x92, 0xca, 0x82, 0xd4, 0xe2, 0x24, 0x36, 0x70, 0xf4, + 0x1b, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, 0x06, 0xe4, 0x37, 0x0c, 0x7c, 0x03, 0x00, 0x00, } func (m *GenesisState) Marshal() (dAtA []byte, err error) { diff --git a/x/dclupgrade/types/grant.pb.go b/x/dclupgrade/types/grant.pb.go index 14c794aa2..29d4f2e60 100644 --- a/x/dclupgrade/types/grant.pb.go +++ b/x/dclupgrade/types/grant.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: dclupgrade/grant.proto +// source: zigbeealliance/distributedcomplianceledger/dclupgrade/grant.proto package types import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -33,7 +33,7 @@ func (m *Grant) Reset() { *m = Grant{} } func (m *Grant) String() string { return proto.CompactTextString(m) } func (*Grant) ProtoMessage() {} func (*Grant) Descriptor() ([]byte, []int) { - return fileDescriptor_71d47e1bdab8c79d, []int{0} + return fileDescriptor_a749b46e32743c76, []int{0} } func (m *Grant) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -87,26 +87,28 @@ func init() { proto.RegisterType((*Grant)(nil), "zigbeealliance.distributedcomplianceledger.dclupgrade.Grant") } -func init() { proto.RegisterFile("dclupgrade/grant.proto", fileDescriptor_71d47e1bdab8c79d) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/dclupgrade/grant.proto", fileDescriptor_a749b46e32743c76) +} -var fileDescriptor_71d47e1bdab8c79d = []byte{ - // 252 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x4b, 0x49, 0xce, 0x29, - 0x2d, 0x48, 0x2f, 0x4a, 0x4c, 0x49, 0xd5, 0x4f, 0x2f, 0x4a, 0xcc, 0x2b, 0xd1, 0x2b, 0x28, 0xca, - 0x2f, 0xc9, 0x17, 0x32, 0xad, 0xca, 0x4c, 0x4f, 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, - 0x4b, 0x4e, 0xd5, 0x4b, 0xc9, 0x2c, 0x2e, 0x29, 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, - 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, 0xa7, 0x16, 0xe9, 0x21, 0x8c, 0x90, 0x92, 0x4c, - 0xce, 0x2f, 0xce, 0xcd, 0x2f, 0x8e, 0x07, 0x1b, 0xa2, 0x0f, 0xe1, 0x40, 0x4c, 0x54, 0x4a, 0xe6, - 0x62, 0x75, 0x07, 0x59, 0x20, 0x64, 0xc4, 0xc5, 0x9e, 0x98, 0x92, 0x52, 0x94, 0x5a, 0x5c, 0x2c, - 0xc1, 0xa8, 0xc0, 0xa8, 0xc1, 0xe9, 0x24, 0x71, 0x69, 0x8b, 0xae, 0x08, 0x54, 0xad, 0x23, 0x44, - 0x26, 0xb8, 0xa4, 0x28, 0x33, 0x2f, 0x3d, 0x08, 0xa6, 0x50, 
0x48, 0x88, 0x8b, 0xa5, 0x24, 0x33, - 0x37, 0x55, 0x82, 0x49, 0x81, 0x51, 0x83, 0x39, 0x08, 0xcc, 0x06, 0x89, 0x65, 0xe6, 0xa5, 0xe5, - 0x4b, 0x30, 0x83, 0x0c, 0x09, 0x02, 0xb3, 0x9d, 0x52, 0x4f, 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, - 0x8e, 0xf1, 0xc1, 0x23, 0x39, 0xc6, 0x09, 0x8f, 0xe5, 0x18, 0x2e, 0x3c, 0x96, 0x63, 0xb8, 0xf1, - 0x58, 0x8e, 0x21, 0xca, 0x3b, 0x3d, 0xb3, 0x24, 0xa3, 0x34, 0x49, 0x2f, 0x39, 0x3f, 0x57, 0x1f, - 0xe2, 0x37, 0x5d, 0x98, 0xe7, 0xf4, 0x91, 0x3c, 0xa7, 0x8b, 0xf0, 0x9d, 0x2e, 0xc4, 0x7b, 0xfa, - 0x15, 0xfa, 0x48, 0x61, 0x54, 0x52, 0x59, 0x90, 0x5a, 0x9c, 0xc4, 0x06, 0xf6, 0x92, 0x31, 0x20, - 0x00, 0x00, 0xff, 0xff, 0x86, 0x7d, 0x1f, 0xa6, 0x3e, 0x01, 0x00, 0x00, +var fileDescriptor_a749b46e32743c76 = []byte{ + // 254 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x72, 0xac, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0xa7, 0x24, 0xe7, 0x94, 0x16, 0xa4, 0x17, 0x25, 0xa6, 0xa4, 0xea, 0xa7, 0x17, + 0x25, 0xe6, 0x95, 0xe8, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0x99, 0xa2, 0x1a, 0xa1, 0x87, 0xc7, + 0x08, 0x3d, 0x84, 0x11, 0x52, 0x92, 0xc9, 0xf9, 0xc5, 0xb9, 0xf9, 0xc5, 0xf1, 0x60, 0x43, 0xf4, + 0x21, 0x1c, 0x88, 0x89, 0x4a, 0xc9, 0x5c, 0xac, 0xee, 0x20, 0x0b, 0x84, 0x8c, 0xb8, 0xd8, 0x13, + 0x53, 0x52, 0x8a, 0x52, 0x8b, 0x8b, 0x25, 0x18, 0x15, 0x18, 0x35, 0x38, 0x9d, 0x24, 0x2e, 0x6d, + 0xd1, 0x15, 0x81, 0xaa, 0x75, 0x84, 0xc8, 0x04, 0x97, 0x14, 0x65, 0xe6, 0xa5, 0x07, 0xc1, 0x14, + 0x0a, 0x09, 0x71, 0xb1, 0x94, 0x64, 0xe6, 0xa6, 0x4a, 0x30, 0x29, 0x30, 0x6a, 0x30, 0x07, 0x81, + 0xd9, 0x20, 0xb1, 0xcc, 0xbc, 0xb4, 0x7c, 0x09, 0x66, 0x90, 0x21, 0x41, 0x60, 0xb6, 0x53, 0xea, + 0x89, 0x47, 0x72, 0x8c, 0x17, 0x1e, 0xc9, 0x31, 0x3e, 0x78, 0x24, 0xc7, 0x38, 0xe1, 0xb1, 0x1c, + 0xc3, 0x85, 0xc7, 0x72, 
0x0c, 0x37, 0x1e, 0xcb, 0x31, 0x44, 0x79, 0xa7, 0x67, 0x96, 0x64, 0x94, + 0x26, 0xe9, 0x25, 0xe7, 0xe7, 0xea, 0x43, 0xfc, 0xa6, 0x8b, 0x2d, 0x7c, 0x74, 0x11, 0xbe, 0xd3, + 0x85, 0x86, 0x50, 0x05, 0x72, 0x18, 0x95, 0x54, 0x16, 0xa4, 0x16, 0x27, 0xb1, 0x81, 0xbd, 0x64, + 0x0c, 0x08, 0x00, 0x00, 0xff, 0xff, 0xea, 0x91, 0x79, 0x3d, 0x69, 0x01, 0x00, 0x00, } func (m *Grant) Marshal() (dAtA []byte, err error) { diff --git a/x/dclupgrade/types/message_approve_upgrade.go b/x/dclupgrade/types/message_approve_upgrade.go index 6e22ab25b..f0ad4d34f 100644 --- a/x/dclupgrade/types/message_approve_upgrade.go +++ b/x/dclupgrade/types/message_approve_upgrade.go @@ -3,6 +3,7 @@ package types import ( "time" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" @@ -47,7 +48,7 @@ func (msg *MsgApproveUpgrade) GetSignBytes() []byte { func (msg *MsgApproveUpgrade) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) } err = validator.Validate(msg) diff --git a/x/dclupgrade/types/message_propose_upgrade.go b/x/dclupgrade/types/message_propose_upgrade.go index 313241779..c434db726 100644 --- a/x/dclupgrade/types/message_propose_upgrade.go +++ b/x/dclupgrade/types/message_propose_upgrade.go @@ -3,6 +3,7 @@ package types import ( "time" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" @@ -47,7 +48,7 @@ func (msg *MsgProposeUpgrade) GetSignBytes() []byte { func (msg *MsgProposeUpgrade) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return 
sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) } err = validator.Validate(msg) diff --git a/x/dclupgrade/types/message_propose_upgrade_test.go b/x/dclupgrade/types/message_propose_upgrade_test.go index 35d3a886f..459f893d4 100644 --- a/x/dclupgrade/types/message_propose_upgrade_test.go +++ b/x/dclupgrade/types/message_propose_upgrade_test.go @@ -2,9 +2,7 @@ package types import ( "testing" - "time" - codectypes "github.com/cosmos/cosmos-sdk/codec/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" @@ -87,36 +85,6 @@ func TestMsgProposeUpgrade_ValidateBasic(t *testing.T) { }, err: sdkerrors.ErrInvalidRequest, }, - { - name: "plan time is not zero", - msg: MsgProposeUpgrade{ - Creator: sample.AccAddress(), - Plan: Plan{ - Name: testconstants.UpgradePlanName, - Height: testconstants.UpgradePlanHeight, - Info: testconstants.UpgradePlanInfo, - Time: time.Now(), - }, - Info: testconstants.Info, - Time: testconstants.Time, - }, - err: *sdkerrors.ErrInvalidRequest, - }, - { - name: "Plan upgradedClientState is not nil", - msg: MsgProposeUpgrade{ - Creator: sample.AccAddress(), - Plan: Plan{ - Name: testconstants.UpgradePlanName, - Height: testconstants.UpgradePlanHeight, - Info: testconstants.UpgradePlanInfo, - UpgradedClientState: &codectypes.Any{TypeUrl: "333"}, - }, - Info: testconstants.Info, - Time: testconstants.Time, - }, - err: *sdkerrors.ErrInvalidRequest, - }, } positiveTests := []struct { diff --git a/x/dclupgrade/types/message_reject_upgrade.go b/x/dclupgrade/types/message_reject_upgrade.go index e004f17d9..a162bf258 100644 --- a/x/dclupgrade/types/message_reject_upgrade.go +++ b/x/dclupgrade/types/message_reject_upgrade.go @@ -3,6 +3,7 @@ package types import ( "time" + "cosmossdk.io/errors" 
sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" ) @@ -46,7 +47,7 @@ func (msg *MsgRejectUpgrade) GetSignBytes() []byte { func (msg *MsgRejectUpgrade) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) } return nil diff --git a/x/dclupgrade/types/proposed_upgrade.pb.go b/x/dclupgrade/types/proposed_upgrade.pb.go index 783785b87..75e733569 100644 --- a/x/dclupgrade/types/proposed_upgrade.pb.go +++ b/x/dclupgrade/types/proposed_upgrade.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: dclupgrade/proposed_upgrade.proto +// source: zigbeealliance/distributedcomplianceledger/dclupgrade/proposed_upgrade.proto package types @@ -7,8 +7,8 @@ import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" types "github.com/cosmos/cosmos-sdk/x/upgrade/types" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -36,7 +36,7 @@ func (m *ProposedUpgrade) Reset() { *m = ProposedUpgrade{} } func (m *ProposedUpgrade) String() string { return proto.CompactTextString(m) } func (*ProposedUpgrade) ProtoMessage() {} func (*ProposedUpgrade) Descriptor() ([]byte, []int) { - return fileDescriptor_0b2f793e220daf3a, []int{0} + return fileDescriptor_0325dd5a8e62f2be, []int{0} } func (m *ProposedUpgrade) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -97,33 +97,35 @@ func init() { proto.RegisterType((*ProposedUpgrade)(nil), "zigbeealliance.distributedcomplianceledger.dclupgrade.ProposedUpgrade") } -func init() { proto.RegisterFile("dclupgrade/proposed_upgrade.proto", fileDescriptor_0b2f793e220daf3a) } +func init() { + 
proto.RegisterFile("zigbeealliance/distributedcomplianceledger/dclupgrade/proposed_upgrade.proto", fileDescriptor_0325dd5a8e62f2be) +} -var fileDescriptor_0b2f793e220daf3a = []byte{ - // 356 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x91, 0xbd, 0x4e, 0x2a, 0x41, - 0x14, 0x80, 0x77, 0x81, 0x5c, 0xc2, 0x52, 0xdc, 0x64, 0x43, 0x6e, 0xf6, 0x92, 0x9b, 0xbd, 0x68, - 0x2c, 0x68, 0x76, 0x26, 0x60, 0xb4, 0xb2, 0x91, 0xc6, 0xc2, 0x86, 0x60, 0xb4, 0xa0, 0x21, 0xb3, - 0x33, 0x27, 0xe3, 0x9a, 0x61, 0x67, 0x32, 0x33, 0x10, 0xf5, 0x29, 0x7c, 0x15, 0x13, 0x1f, 0x82, - 0x92, 0x58, 0x59, 0x19, 0x03, 0x2f, 0x62, 0x60, 0x76, 0x03, 0xb6, 0xc6, 0x6e, 0xcf, 0xcf, 0x7e, - 0xdf, 0x39, 0x73, 0x82, 0x03, 0x46, 0xc5, 0x4c, 0x71, 0x4d, 0x18, 0x60, 0xa5, 0xa5, 0x92, 0x06, - 0xd8, 0xa4, 0x48, 0x20, 0xa5, 0xa5, 0x95, 0xe1, 0xc9, 0x63, 0xc6, 0x53, 0x00, 0x22, 0x44, 0x46, - 0x72, 0x0a, 0x88, 0x65, 0xc6, 0xea, 0x2c, 0x9d, 0x59, 0x60, 0x54, 0x4e, 0x95, 0xcb, 0x0a, 0x60, - 0x1c, 0x34, 0xda, 0xd1, 0xda, 0x2d, 0x2e, 0xb9, 0xdc, 0x12, 0xf0, 0xe6, 0xcb, 0xc1, 0xda, 0x7f, - 0xa9, 0x34, 0x53, 0x69, 0x26, 0xae, 0xe0, 0x82, 0xa2, 0x74, 0xe4, 0x22, 0x5c, 0x8e, 0x33, 0xef, - 0xa5, 0x60, 0x49, 0x0f, 0x7f, 0x99, 0xa6, 0xfd, 0x67, 0x6f, 0x60, 0xae, 0x49, 0x6e, 0x5d, 0xfe, - 0xf0, 0xb9, 0x12, 0xfc, 0x1e, 0x16, 0x0b, 0x5c, 0xbb, 0x7a, 0x78, 0x1a, 0xd4, 0x94, 0x20, 0x79, - 0xe4, 0x77, 0xfc, 0x6e, 0xb3, 0xff, 0x0f, 0x15, 0xba, 0x12, 0x58, 0x08, 0xd0, 0x50, 0x90, 0x7c, - 0x50, 0x5b, 0xbc, 0xff, 0xf7, 0x46, 0xdb, 0xfe, 0xb0, 0x1f, 0xd4, 0xa9, 0x06, 0x62, 0xa5, 0x8e, - 0x2a, 0x1d, 0xbf, 0xdb, 0x18, 0x44, 0xaf, 0x2f, 0x49, 0xab, 0xf8, 0xfb, 0x9c, 0x31, 0x0d, 0xc6, - 0x5c, 0x59, 0x9d, 0xe5, 0x7c, 0x54, 0x36, 0x86, 0xe3, 0xa0, 0x41, 0x94, 0xd2, 0x72, 0x4e, 0x84, - 0x89, 0xaa, 0x9d, 0x6a, 0xb7, 0xd9, 0x3f, 0x43, 0xdf, 0x7a, 0x39, 0x74, 0xb1, 0x59, 0x6b, 0xb4, - 0xc3, 0x85, 0x37, 0x41, 0x5d, 0xc3, 0x1d, 0x50, 0x6b, 0xa2, 0xda, 0x0f, 0x90, 0x4b, 0xd8, 0x00, - 
0x16, 0xab, 0xd8, 0x5f, 0xae, 0x62, 0xff, 0x63, 0x15, 0xfb, 0x4f, 0xeb, 0xd8, 0x5b, 0xae, 0x63, - 0xef, 0x6d, 0x1d, 0x7b, 0xe3, 0x4b, 0x9e, 0xd9, 0xdb, 0x59, 0x8a, 0xa8, 0x9c, 0x62, 0xa7, 0x4a, - 0x4a, 0x17, 0xde, 0x73, 0x25, 0x3b, 0x59, 0xe2, 0x6c, 0xf8, 0x1e, 0xef, 0x1d, 0xc8, 0x3e, 0x28, - 0x30, 0xe9, 0xaf, 0xed, 0x85, 0x8e, 0x3f, 0x03, 0x00, 0x00, 0xff, 0xff, 0x42, 0xaa, 0xfd, 0x84, - 0x6c, 0x02, 0x00, 0x00, +var fileDescriptor_0325dd5a8e62f2be = []byte{ + // 361 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x91, 0xbf, 0x4e, 0x2a, 0x41, + 0x14, 0xc6, 0x77, 0x81, 0x5c, 0xc2, 0x52, 0xdc, 0x64, 0x43, 0xb1, 0x97, 0xdc, 0xac, 0xc4, 0x58, + 0xd0, 0xec, 0x6c, 0xc0, 0x68, 0x65, 0x03, 0x8d, 0x85, 0x16, 0x04, 0xa3, 0x05, 0x0d, 0x99, 0xdd, + 0x39, 0x19, 0xd7, 0x0c, 0x3b, 0x93, 0x99, 0x81, 0xa8, 0x4f, 0xe1, 0xab, 0x98, 0xf8, 0x10, 0x94, + 0xc4, 0xca, 0xca, 0x18, 0x78, 0x11, 0x03, 0x33, 0x1b, 0x24, 0x31, 0x16, 0xc4, 0x6e, 0xcf, 0x9f, + 0xfd, 0x9d, 0xef, 0x9b, 0xcf, 0xbb, 0x7c, 0xcc, 0x68, 0x02, 0x80, 0x19, 0xcb, 0x70, 0x9e, 0x42, + 0x4c, 0x32, 0xa5, 0x65, 0x96, 0x4c, 0x35, 0x90, 0x94, 0x4f, 0x84, 0xe9, 0x32, 0x20, 0x14, 0x64, + 0x4c, 0x52, 0x36, 0x15, 0x54, 0x62, 0x02, 0xb1, 0x90, 0x5c, 0x70, 0x05, 0x64, 0x6c, 0x1b, 0x48, + 0x48, 0xae, 0xb9, 0x7f, 0xb2, 0x4b, 0x43, 0x3f, 0xd0, 0xd0, 0x96, 0xd6, 0x6c, 0x50, 0x4e, 0xf9, + 0x86, 0x10, 0xaf, 0xbf, 0x0c, 0xac, 0xf9, 0x2f, 0xe5, 0x6a, 0xc2, 0xd5, 0xd8, 0x0c, 0x4c, 0x61, + 0x47, 0x47, 0xa6, 0x8a, 0x0b, 0x39, 0xb3, 0x4e, 0x02, 0x1a, 0x77, 0xe2, 0x1d, 0x35, 0xcd, 0xde, + 0x7e, 0xde, 0xa8, 0xc4, 0xb9, 0x36, 0x88, 0xc3, 0xe7, 0x92, 0xf7, 0x77, 0x60, 0xbd, 0x5e, 0x9b, + 0xb9, 0x7f, 0xea, 0x55, 0x04, 0xc3, 0x79, 0xe0, 0xb6, 0xdc, 0x76, 0xbd, 0xfb, 0x1f, 0x59, 0x65, + 0xc5, 0x6d, 0xab, 0x05, 0x0d, 0x18, 0xce, 0xfb, 0x95, 0xf9, 0xfb, 0x81, 0x33, 0xdc, 0xec, 0xfb, + 0x5d, 0xaf, 0x9a, 0x4a, 0xc0, 0x9a, 0xcb, 0xa0, 0xd4, 0x72, 0xdb, 0xb5, 0x7e, 0xf0, 0xfa, 0x12, + 0x35, 0xec, 
0xdf, 0x3d, 0x42, 0x24, 0x28, 0x75, 0xa5, 0x65, 0x96, 0xd3, 0x61, 0xb1, 0xe8, 0x8f, + 0xbc, 0x1a, 0x16, 0x42, 0xf2, 0x19, 0x66, 0x2a, 0x28, 0xb7, 0xca, 0xed, 0x7a, 0xf7, 0x0c, 0xed, + 0xf5, 0xc8, 0xe8, 0x7c, 0x6d, 0x6b, 0xb8, 0xc5, 0xf9, 0x37, 0x5e, 0x55, 0xc2, 0x1d, 0xa4, 0x5a, + 0x05, 0x95, 0x5f, 0x20, 0x17, 0xb0, 0x3e, 0xcc, 0x97, 0xa1, 0xbb, 0x58, 0x86, 0xee, 0xc7, 0x32, + 0x74, 0x9f, 0x56, 0xa1, 0xb3, 0x58, 0x85, 0xce, 0xdb, 0x2a, 0x74, 0x46, 0x17, 0x34, 0xd3, 0xb7, + 0xd3, 0x04, 0xa5, 0x7c, 0x12, 0x9b, 0x53, 0xd1, 0x77, 0xe1, 0x44, 0xdb, 0x63, 0x91, 0x8d, 0xe7, + 0xfe, 0x6b, 0x40, 0xfa, 0x41, 0x80, 0x4a, 0xfe, 0x6c, 0x12, 0x3a, 0xfe, 0x0c, 0x00, 0x00, 0xff, + 0xff, 0x3c, 0x63, 0x17, 0x3a, 0xc2, 0x02, 0x00, 0x00, } func (m *ProposedUpgrade) Marshal() (dAtA []byte, err error) { diff --git a/x/dclupgrade/types/query.pb.go b/x/dclupgrade/types/query.pb.go index 7166daf23..43135b131 100644 --- a/x/dclupgrade/types/query.pb.go +++ b/x/dclupgrade/types/query.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: dclupgrade/query.proto +// source: zigbeealliance/distributedcomplianceledger/dclupgrade/query.proto package types @@ -7,9 +7,9 @@ import ( context "context" fmt "fmt" query "github.com/cosmos/cosmos-sdk/types/query" - _ "github.com/gogo/protobuf/gogoproto" - grpc1 "github.com/gogo/protobuf/grpc" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + grpc1 "github.com/cosmos/gogoproto/grpc" + proto "github.com/cosmos/gogoproto/proto" _ "google.golang.org/genproto/googleapis/api/annotations" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" @@ -38,7 +38,7 @@ func (m *QueryGetProposedUpgradeRequest) Reset() { *m = QueryGetProposed func (m *QueryGetProposedUpgradeRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetProposedUpgradeRequest) ProtoMessage() {} func (*QueryGetProposedUpgradeRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_24d33970bdf865fd, []int{0} + return fileDescriptor_da58ea5765b93b29, []int{0} } func (m *QueryGetProposedUpgradeRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -82,7 +82,7 @@ func (m *QueryGetProposedUpgradeResponse) Reset() { *m = QueryGetPropose func (m *QueryGetProposedUpgradeResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetProposedUpgradeResponse) ProtoMessage() {} func (*QueryGetProposedUpgradeResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_24d33970bdf865fd, []int{1} + return fileDescriptor_da58ea5765b93b29, []int{1} } func (m *QueryGetProposedUpgradeResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -126,7 +126,7 @@ func (m *QueryAllProposedUpgradeRequest) Reset() { *m = QueryAllProposed func (m *QueryAllProposedUpgradeRequest) String() string { return proto.CompactTextString(m) } func (*QueryAllProposedUpgradeRequest) ProtoMessage() {} func (*QueryAllProposedUpgradeRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_24d33970bdf865fd, 
[]int{2} + return fileDescriptor_da58ea5765b93b29, []int{2} } func (m *QueryAllProposedUpgradeRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -171,7 +171,7 @@ func (m *QueryAllProposedUpgradeResponse) Reset() { *m = QueryAllPropose func (m *QueryAllProposedUpgradeResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllProposedUpgradeResponse) ProtoMessage() {} func (*QueryAllProposedUpgradeResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_24d33970bdf865fd, []int{3} + return fileDescriptor_da58ea5765b93b29, []int{3} } func (m *QueryAllProposedUpgradeResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -222,7 +222,7 @@ func (m *QueryGetApprovedUpgradeRequest) Reset() { *m = QueryGetApproved func (m *QueryGetApprovedUpgradeRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetApprovedUpgradeRequest) ProtoMessage() {} func (*QueryGetApprovedUpgradeRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_24d33970bdf865fd, []int{4} + return fileDescriptor_da58ea5765b93b29, []int{4} } func (m *QueryGetApprovedUpgradeRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -266,7 +266,7 @@ func (m *QueryGetApprovedUpgradeResponse) Reset() { *m = QueryGetApprove func (m *QueryGetApprovedUpgradeResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetApprovedUpgradeResponse) ProtoMessage() {} func (*QueryGetApprovedUpgradeResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_24d33970bdf865fd, []int{5} + return fileDescriptor_da58ea5765b93b29, []int{5} } func (m *QueryGetApprovedUpgradeResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -310,7 +310,7 @@ func (m *QueryAllApprovedUpgradeRequest) Reset() { *m = QueryAllApproved func (m *QueryAllApprovedUpgradeRequest) String() string { return proto.CompactTextString(m) } func (*QueryAllApprovedUpgradeRequest) ProtoMessage() {} func (*QueryAllApprovedUpgradeRequest) 
Descriptor() ([]byte, []int) { - return fileDescriptor_24d33970bdf865fd, []int{6} + return fileDescriptor_da58ea5765b93b29, []int{6} } func (m *QueryAllApprovedUpgradeRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -355,7 +355,7 @@ func (m *QueryAllApprovedUpgradeResponse) Reset() { *m = QueryAllApprove func (m *QueryAllApprovedUpgradeResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllApprovedUpgradeResponse) ProtoMessage() {} func (*QueryAllApprovedUpgradeResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_24d33970bdf865fd, []int{7} + return fileDescriptor_da58ea5765b93b29, []int{7} } func (m *QueryAllApprovedUpgradeResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -406,7 +406,7 @@ func (m *QueryGetRejectedUpgradeRequest) Reset() { *m = QueryGetRejected func (m *QueryGetRejectedUpgradeRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetRejectedUpgradeRequest) ProtoMessage() {} func (*QueryGetRejectedUpgradeRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_24d33970bdf865fd, []int{8} + return fileDescriptor_da58ea5765b93b29, []int{8} } func (m *QueryGetRejectedUpgradeRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -450,7 +450,7 @@ func (m *QueryGetRejectedUpgradeResponse) Reset() { *m = QueryGetRejecte func (m *QueryGetRejectedUpgradeResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetRejectedUpgradeResponse) ProtoMessage() {} func (*QueryGetRejectedUpgradeResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_24d33970bdf865fd, []int{9} + return fileDescriptor_da58ea5765b93b29, []int{9} } func (m *QueryGetRejectedUpgradeResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -494,7 +494,7 @@ func (m *QueryAllRejectedUpgradeRequest) Reset() { *m = QueryAllRejected func (m *QueryAllRejectedUpgradeRequest) String() string { return proto.CompactTextString(m) } func 
(*QueryAllRejectedUpgradeRequest) ProtoMessage() {} func (*QueryAllRejectedUpgradeRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_24d33970bdf865fd, []int{10} + return fileDescriptor_da58ea5765b93b29, []int{10} } func (m *QueryAllRejectedUpgradeRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -539,7 +539,7 @@ func (m *QueryAllRejectedUpgradeResponse) Reset() { *m = QueryAllRejecte func (m *QueryAllRejectedUpgradeResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllRejectedUpgradeResponse) ProtoMessage() {} func (*QueryAllRejectedUpgradeResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_24d33970bdf865fd, []int{11} + return fileDescriptor_da58ea5765b93b29, []int{11} } func (m *QueryAllRejectedUpgradeResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -597,51 +597,53 @@ func init() { proto.RegisterType((*QueryAllRejectedUpgradeResponse)(nil), "zigbeealliance.distributedcomplianceledger.dclupgrade.QueryAllRejectedUpgradeResponse") } -func init() { proto.RegisterFile("dclupgrade/query.proto", fileDescriptor_24d33970bdf865fd) } - -var fileDescriptor_24d33970bdf865fd = []byte{ - // 644 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x57, 0x4f, 0x6b, 0xd4, 0x40, - 0x14, 0xdf, 0x74, 0xab, 0x60, 0x7a, 0x28, 0x0c, 0x22, 0xb2, 0x48, 0x6a, 0x23, 0x68, 0x2d, 0x6c, - 0xc6, 0x56, 0xfd, 0x00, 0xdb, 0x43, 0x7b, 0xf0, 0x52, 0x17, 0xea, 0xc1, 0x8b, 0x4c, 0x92, 0x47, - 0x1a, 0xc9, 0x66, 0xa6, 0x99, 0xd9, 0xc5, 0x2a, 0x5e, 0xfc, 0x04, 0x82, 0x27, 0x3f, 0x8a, 0xdf, - 0xa0, 0xc7, 0x82, 0x17, 0x4f, 0x22, 0xbb, 0x9e, 0x3d, 0xf8, 0x09, 0xca, 0x26, 0x13, 0x36, 0x99, - 0xcd, 0x6c, 0xd9, 0x66, 0x73, 0x0b, 0x33, 0x6f, 0xde, 0x7b, 0xbf, 0x3f, 0xbc, 0x47, 0xcc, 0x7b, - 0xbe, 0x17, 0x0d, 0x59, 0x90, 0x10, 0x1f, 0xf0, 0xd9, 0x10, 0x92, 0x73, 0x87, 0x25, 0x54, 0x50, - 0xf4, 0xf2, 0x63, 0x18, 0xb8, 0x00, 0x24, 0x8a, 0x42, 0x12, 0x7b, 0xe0, 0xf8, 0x21, 0x17, 
0x49, - 0xe8, 0x0e, 0x05, 0xf8, 0x1e, 0x1d, 0xb0, 0xec, 0x34, 0x02, 0x3f, 0x80, 0xc4, 0x99, 0xa5, 0xe8, - 0xdc, 0x0d, 0x68, 0x40, 0xd3, 0x0c, 0x78, 0xfa, 0x95, 0x25, 0xeb, 0x3c, 0x08, 0x28, 0x0d, 0x22, - 0xc0, 0x84, 0x85, 0x98, 0xc4, 0x31, 0x15, 0x44, 0x84, 0x34, 0xe6, 0xf2, 0x76, 0xd7, 0xa3, 0x7c, - 0x40, 0x39, 0x76, 0x09, 0x97, 0x3d, 0xe0, 0xd1, 0x9e, 0x0b, 0x82, 0xec, 0x61, 0x46, 0x82, 0x30, - 0x4e, 0x83, 0x65, 0xec, 0x76, 0xa1, 0x5d, 0x96, 0x50, 0x46, 0x39, 0xf8, 0xef, 0xe4, 0x41, 0x45, - 0x08, 0x61, 0x2c, 0xa1, 0xa3, 0x85, 0x21, 0x09, 0xbc, 0x07, 0x4f, 0xa8, 0x21, 0xf6, 0x0b, 0xd3, - 0x7a, 0x3d, 0x6d, 0xe5, 0x08, 0xc4, 0xb1, 0xac, 0x73, 0x92, 0x05, 0xf4, 0xe1, 0x6c, 0x08, 0x5c, - 0x20, 0x64, 0xae, 0xc7, 0x64, 0x00, 0xf7, 0x8d, 0x87, 0xc6, 0xce, 0x9d, 0x7e, 0xfa, 0x6d, 0x7f, - 0x37, 0xcc, 0x2d, 0xed, 0x33, 0xce, 0x68, 0xcc, 0x01, 0x8d, 0xcc, 0x4d, 0x56, 0xbe, 0x4a, 0x53, - 0x6c, 0xec, 0x1f, 0x3a, 0x37, 0xe2, 0xdc, 0x51, 0x0a, 0x1d, 0xac, 0x5f, 0xfc, 0xde, 0x6a, 0xf5, - 0xd5, 0x22, 0xf6, 0xa9, 0x44, 0xd4, 0x8b, 0x22, 0x0d, 0xa2, 0x43, 0xd3, 0x9c, 0x11, 0x2e, 0x9b, - 0x7a, 0xec, 0x64, 0xea, 0x38, 0x53, 0x75, 0x9c, 0xcc, 0x21, 0x52, 0x1d, 0xe7, 0x98, 0x04, 0xf9, - 0xdb, 0x7e, 0xe1, 0xa5, 0x3d, 0xce, 0x59, 0xa8, 0x2a, 0xb5, 0x88, 0x85, 0x76, 0xe3, 0x2c, 0xa0, - 0xa3, 0x12, 0xc6, 0xb5, 0x14, 0xe3, 0x93, 0x6b, 0x31, 0x66, 0x4d, 0x97, 0x40, 0x16, 0x0c, 0xd2, - 0x93, 0x2e, 0x5b, 0xd2, 0x20, 0x73, 0xcf, 0x66, 0xd4, 0x90, 0xf2, 0x55, 0x4d, 0x83, 0x28, 0x85, - 0x72, 0x6a, 0x94, 0x22, 0x45, 0x83, 0x68, 0x10, 0x35, 0x61, 0x90, 0xa5, 0x58, 0x68, 0x37, 0xce, - 0x42, 0x23, 0x06, 0xe9, 0xcb, 0x19, 0xb3, 0xa4, 0x41, 0xe6, 0x9e, 0xcd, 0xa8, 0x49, 0xca, 0x57, - 0x35, 0x0d, 0xa2, 0x14, 0xca, 0xa9, 0x51, 0x8a, 0x14, 0x0d, 0xa2, 0x41, 0xd4, 0x84, 0x41, 0x96, - 0x62, 0xa1, 0xdd, 0x38, 0x0b, 0x2b, 0x33, 0xc8, 0xfe, 0x8f, 0x0d, 0xf3, 0x56, 0x0a, 0x12, 0xfd, - 0x37, 0xcc, 0x4d, 0x65, 0x7e, 0xa1, 0x93, 0x1b, 0xa2, 0x58, 0xbc, 0xb5, 0x3a, 0x6f, 0x56, 0x9d, - 0x36, 0x03, 0x66, 0x3f, 0xfb, 
0xf2, 0xf3, 0xef, 0xb7, 0xb5, 0x5d, 0xb4, 0x83, 0x7d, 0x2f, 0xc2, - 0x15, 0x5b, 0xba, 0x2b, 0x0f, 0x38, 0xfe, 0x34, 0x35, 0xfa, 0x67, 0xf4, 0xcf, 0x30, 0x91, 0x92, - 0xad, 0x17, 0x45, 0xf5, 0x70, 0x6b, 0x77, 0x5b, 0x3d, 0xdc, 0xfa, 0x3d, 0x66, 0x3f, 0x4d, 0x71, - 0x3f, 0x42, 0xdb, 0xd7, 0xe2, 0x4e, 0x55, 0x56, 0x86, 0x50, 0x6d, 0x95, 0xab, 0x07, 0x75, 0x6d, - 0x95, 0x35, 0x43, 0x59, 0xaf, 0x72, 0x3e, 0x45, 0x2b, 0x55, 0x56, 0xb2, 0xad, 0x42, 0xe5, 0x26, - 0x70, 0xeb, 0x97, 0x91, 0x5e, 0xe5, 0x39, 0xdc, 0xa9, 0xca, 0xca, 0x24, 0xa9, 0xad, 0x72, 0xf5, - 0xb4, 0xad, 0xad, 0xb2, 0x66, 0xb2, 0xea, 0x55, 0xce, 0x47, 0x61, 0xa5, 0xca, 0x4a, 0xb6, 0x55, - 0xa8, 0xdc, 0x04, 0x6e, 0xfd, 0x46, 0xd1, 0xab, 0x3c, 0x87, 0xfb, 0x00, 0x2e, 0xc6, 0x96, 0x71, - 0x39, 0xb6, 0x8c, 0x3f, 0x63, 0xcb, 0xf8, 0x3a, 0xb1, 0x5a, 0x97, 0x13, 0xab, 0xf5, 0x6b, 0x62, - 0xb5, 0xde, 0xbe, 0x0a, 0x42, 0x71, 0x3a, 0x74, 0x1d, 0x8f, 0x0e, 0x70, 0xd6, 0x66, 0x37, 0xef, - 0x13, 0x17, 0xfa, 0xec, 0xce, 0x1a, 0xed, 0x66, 0x9d, 0xe2, 0x0f, 0xc5, 0x92, 0xe2, 0x9c, 0x01, - 0x77, 0x6f, 0xa7, 0x3f, 0x23, 0xcf, 0xaf, 0x02, 0x00, 0x00, 0xff, 0xff, 0xb1, 0x16, 0x1e, 0x91, - 0xa6, 0x0d, 0x00, 0x00, +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/dclupgrade/query.proto", fileDescriptor_da58ea5765b93b29) +} + +var fileDescriptor_da58ea5765b93b29 = []byte{ + // 647 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x57, 0xbf, 0x6e, 0xd3, 0x40, + 0x18, 0x8f, 0x9b, 0x82, 0x84, 0x3b, 0x54, 0x3a, 0x31, 0xa0, 0x08, 0xb9, 0x60, 0x24, 0x28, 0x95, + 0xe2, 0xa3, 0x05, 0x1e, 0x20, 0x1d, 0xda, 0x01, 0x86, 0x12, 0xa9, 0x0c, 0x2c, 0xe8, 0x6c, 0x7f, + 0x72, 0x8d, 0x1c, 0xdf, 0xd5, 0x77, 0x89, 0x28, 0x88, 0x85, 0x27, 0x40, 0x62, 0xe2, 0x51, 0x78, + 0x83, 0x8e, 0x95, 0x58, 0x98, 0x10, 0x4a, 0x98, 0x19, 0x78, 0x02, 0x14, 0xfb, 0xac, 0xc4, 0x17, + 0x5f, 0xaa, 0xc4, 0xf6, 0x66, 0xf9, 0x7c, 0xbf, 0xef, 0xfb, 0xfd, 0xd1, 0xf7, 0xc9, 0x66, 0xef, + 
0x43, 0x18, 0xb8, 0x00, 0x24, 0x8a, 0x42, 0x12, 0x7b, 0x80, 0xfd, 0x90, 0x8b, 0x24, 0x74, 0x87, + 0x02, 0x7c, 0x8f, 0x0e, 0x58, 0xf6, 0x36, 0x02, 0x3f, 0x80, 0x04, 0xfb, 0x5e, 0x34, 0x64, 0x41, + 0x42, 0x7c, 0xc0, 0xe7, 0x43, 0x48, 0x2e, 0x1c, 0x96, 0x50, 0x41, 0xd1, 0xf3, 0x22, 0x84, 0xb3, + 0x04, 0xc2, 0x99, 0x41, 0x74, 0x6e, 0x07, 0x34, 0xa0, 0x29, 0x02, 0x9e, 0x3e, 0x65, 0x60, 0x9d, + 0xbb, 0x01, 0xa5, 0x41, 0x04, 0x98, 0xb0, 0x10, 0x93, 0x38, 0xa6, 0x82, 0x88, 0x90, 0xc6, 0x5c, + 0x9e, 0xee, 0x79, 0x94, 0x0f, 0x28, 0xc7, 0x2e, 0xe1, 0xb2, 0x07, 0x3c, 0xda, 0x77, 0x41, 0x90, + 0x7d, 0xcc, 0x48, 0x10, 0xc6, 0xe9, 0xc7, 0xf2, 0xdb, 0x97, 0xeb, 0x31, 0x63, 0x09, 0x65, 0x94, + 0x83, 0xff, 0x56, 0xbe, 0xa8, 0x86, 0x46, 0x18, 0x4b, 0xe8, 0xa8, 0x2e, 0xb4, 0x04, 0xde, 0x81, + 0x27, 0x54, 0x34, 0xfb, 0x99, 0x69, 0xbd, 0x9a, 0x6a, 0x71, 0x0c, 0xe2, 0x44, 0x76, 0x7f, 0x9a, + 0x7d, 0xd0, 0x87, 0xf3, 0x21, 0x70, 0x81, 0x90, 0xb9, 0x19, 0x93, 0x01, 0xdc, 0x31, 0xee, 0x19, + 0xbb, 0xb7, 0xfa, 0xe9, 0xb3, 0xfd, 0xcd, 0x30, 0x77, 0xb4, 0xd7, 0x38, 0xa3, 0x31, 0x07, 0x34, + 0x32, 0xb7, 0x59, 0xf1, 0x28, 0x85, 0xd8, 0x3a, 0x38, 0x72, 0xd6, 0x32, 0xdd, 0x51, 0x0a, 0x1d, + 0x6e, 0x5e, 0xfe, 0xda, 0x69, 0xf5, 0xd5, 0x22, 0xf6, 0x99, 0x64, 0xd4, 0x8b, 0x22, 0x0d, 0xa3, + 0x23, 0xd3, 0x9c, 0x39, 0x2e, 0x9b, 0x7a, 0xe8, 0x64, 0xf1, 0x70, 0xa6, 0xf1, 0x70, 0xb2, 0x88, + 0xca, 0x78, 0x38, 0x27, 0x24, 0xc8, 0xef, 0xf6, 0xe7, 0x6e, 0xda, 0xe3, 0x5c, 0x85, 0xb2, 0x52, + 0xcb, 0x54, 0x68, 0x37, 0xae, 0x02, 0x3a, 0x2e, 0x70, 0xdc, 0x48, 0x39, 0x3e, 0xba, 0x96, 0x63, + 0xd6, 0x74, 0x81, 0xe4, 0x5c, 0x40, 0x7a, 0x32, 0x90, 0x2b, 0x06, 0x64, 0xe1, 0xda, 0x4c, 0x1a, + 0x52, 0x3c, 0xaa, 0x18, 0x10, 0xa5, 0x50, 0x2e, 0x8d, 0x52, 0x64, 0x3e, 0x20, 0x1a, 0x46, 0x4d, + 0x04, 0x64, 0x25, 0x15, 0xda, 0x8d, 0xab, 0xd0, 0x48, 0x40, 0xfa, 0x72, 0xc6, 0xac, 0x18, 0x90, + 0x85, 0x6b, 0x33, 0x69, 0x92, 0xe2, 0x51, 0xc5, 0x80, 0x28, 0x85, 0x72, 0x69, 0x94, 0x22, 0xf3, + 0x01, 0xd1, 0x30, 0x6a, 0x22, 0x20, 
0x2b, 0xa9, 0xd0, 0x6e, 0x5c, 0x85, 0xda, 0x02, 0x72, 0xf0, + 0x7d, 0xcb, 0xbc, 0x91, 0x92, 0x44, 0xff, 0x0c, 0x73, 0x5b, 0x99, 0x5f, 0xe8, 0x74, 0x4d, 0x16, + 0xcb, 0xb7, 0x56, 0xe7, 0x75, 0xdd, 0xb0, 0x19, 0x31, 0xfb, 0xc9, 0xe7, 0x1f, 0x7f, 0xbe, 0x6e, + 0xec, 0xa1, 0xdd, 0xe9, 0x6e, 0x2d, 0xdb, 0xfd, 0x5d, 0xf9, 0x82, 0xe3, 0x8f, 0xd3, 0xa0, 0x7f, + 0x42, 0x7f, 0x0d, 0x13, 0x29, 0x68, 0xbd, 0x28, 0xaa, 0xc6, 0x5b, 0xbb, 0xdb, 0xaa, 0xf1, 0xd6, + 0xef, 0x31, 0xfb, 0x71, 0xca, 0xfb, 0x01, 0xba, 0x7f, 0x2d, 0xef, 0xd4, 0x65, 0x65, 0x08, 0x55, + 0x76, 0xb9, 0x7c, 0x50, 0x57, 0x76, 0x59, 0x33, 0x94, 0xf5, 0x2e, 0xe7, 0x53, 0xb4, 0xd4, 0x65, + 0x05, 0xad, 0x0e, 0x97, 0x9b, 0xe0, 0xad, 0x5f, 0x46, 0x7a, 0x97, 0x17, 0x78, 0xa7, 0x2e, 0x2b, + 0x93, 0xa4, 0xb2, 0xcb, 0xe5, 0xd3, 0xb6, 0xb2, 0xcb, 0x9a, 0xc9, 0xaa, 0x77, 0x39, 0x1f, 0x85, + 0xa5, 0x2e, 0x2b, 0x68, 0x75, 0xb8, 0xdc, 0x04, 0x6f, 0xfd, 0x46, 0xd1, 0xbb, 0xbc, 0xc0, 0xfb, + 0x10, 0x2e, 0xc7, 0x96, 0x71, 0x35, 0xb6, 0x8c, 0xdf, 0x63, 0xcb, 0xf8, 0x32, 0xb1, 0x5a, 0x57, + 0x13, 0xab, 0xf5, 0x73, 0x62, 0xb5, 0xde, 0xbc, 0x08, 0x42, 0x71, 0x36, 0x74, 0x1d, 0x8f, 0x0e, + 0x70, 0xd6, 0x66, 0xb7, 0xec, 0x97, 0xa4, 0x3b, 0x6b, 0xb4, 0x2b, 0x7f, 0x4a, 0xde, 0xcf, 0x97, + 0x14, 0x17, 0x0c, 0xb8, 0x7b, 0x33, 0xfd, 0x19, 0x79, 0xfa, 0x3f, 0x00, 0x00, 0xff, 0xff, 0xf5, + 0xe3, 0xd8, 0xe9, 0x52, 0x0e, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. 
@@ -913,7 +915,7 @@ var _Query_serviceDesc = grpc.ServiceDesc{ }, }, Streams: []grpc.StreamDesc{}, - Metadata: "dclupgrade/query.proto", + Metadata: "zigbeealliance/distributedcomplianceledger/dclupgrade/query.proto", } func (m *QueryGetProposedUpgradeRequest) Marshal() (dAtA []byte, err error) { diff --git a/x/dclupgrade/types/query.pb.gw.go b/x/dclupgrade/types/query.pb.gw.go index 370bfc28b..5188d7f02 100644 --- a/x/dclupgrade/types/query.pb.gw.go +++ b/x/dclupgrade/types/query.pb.gw.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. -// source: dclupgrade/query.proto +// source: zigbeealliance/distributedcomplianceledger/dclupgrade/query.proto /* Package types is a reverse proxy. @@ -612,17 +612,17 @@ func RegisterQueryHandlerClient(ctx context.Context, mux *runtime.ServeMux, clie } var ( - pattern_Query_ProposedUpgrade_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "dclupgrade", "proposed-upgrades", "name"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_ProposedUpgrade_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "dclupgrade", "proposed-upgrades", "name"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_ProposedUpgradeAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "dclupgrade", "proposed-upgrades"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_ProposedUpgradeAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "dclupgrade", "proposed-upgrades"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_ApprovedUpgrade_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "dclupgrade", "approved-upgrades", "name"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_ApprovedUpgrade_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 
1, 5, 3}, []string{"dcl", "dclupgrade", "approved-upgrades", "name"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_ApprovedUpgradeAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "dclupgrade", "approved-upgrades"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_ApprovedUpgradeAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "dclupgrade", "approved-upgrades"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_RejectedUpgrade_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "dclupgrade", "rejected-upgrades", "name"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_RejectedUpgrade_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "dclupgrade", "rejected-upgrades", "name"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_RejectedUpgradeAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "dclupgrade", "rejected-upgrades"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_RejectedUpgradeAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "dclupgrade", "rejected-upgrades"}, "", runtime.AssumeColonVerbOpt(false))) ) var ( diff --git a/x/dclupgrade/types/rejected_upgrade.pb.go b/x/dclupgrade/types/rejected_upgrade.pb.go index ef253deee..5eda1c429 100644 --- a/x/dclupgrade/types/rejected_upgrade.pb.go +++ b/x/dclupgrade/types/rejected_upgrade.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: dclupgrade/rejected_upgrade.proto +// source: zigbeealliance/distributedcomplianceledger/dclupgrade/rejected_upgrade.proto package types @@ -7,8 +7,8 @@ import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" types "github.com/cosmos/cosmos-sdk/x/upgrade/types" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -36,7 +36,7 @@ func (m *RejectedUpgrade) Reset() { *m = RejectedUpgrade{} } func (m *RejectedUpgrade) String() string { return proto.CompactTextString(m) } func (*RejectedUpgrade) ProtoMessage() {} func (*RejectedUpgrade) Descriptor() ([]byte, []int) { - return fileDescriptor_688c74160bb15b26, []int{0} + return fileDescriptor_9591340410a3709a, []int{0} } func (m *RejectedUpgrade) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -97,32 +97,35 @@ func init() { proto.RegisterType((*RejectedUpgrade)(nil), "zigbeealliance.distributedcomplianceledger.dclupgrade.RejectedUpgrade") } -func init() { proto.RegisterFile("dclupgrade/rejected_upgrade.proto", fileDescriptor_688c74160bb15b26) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/dclupgrade/rejected_upgrade.proto", fileDescriptor_9591340410a3709a) +} -var fileDescriptor_688c74160bb15b26 = []byte{ - // 351 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x91, 0x3f, 0x4b, 0xc3, 0x40, - 0x14, 0xc0, 0x93, 0xb6, 0x58, 0x9a, 0x0e, 0x42, 0x28, 0x12, 0x8b, 0xc4, 0x2a, 0x0e, 0x5d, 0x72, - 0x47, 0x2b, 0x3a, 0xb9, 0xd8, 0xc5, 0xc1, 0x45, 0x22, 0x3a, 0x74, 0x29, 0x97, 0xdc, 0xe3, 0x8c, - 0x5c, 0x73, 0xc7, 0xdd, 0xb5, 0xa8, 0x9f, 0xc2, 0xaf, 0x22, 0xf8, 0x21, 0x3a, 0x16, 0x27, 0x27, - 0x91, 0xf6, 0x8b, 0x48, 0x7b, 0x09, 0xad, 0xab, 0xb8, 0xe5, 0xfd, 0xc9, 0xef, 0xf7, 0xde, 0x3d, - 0xef, 0x88, 0xa6, 0x7c, 0x22, 0x99, 0x22, 0x14, 0xb0, 0x82, 0x47, 0x48, 0x0d, 
0xd0, 0x51, 0x91, - 0x40, 0x52, 0x09, 0x23, 0xfc, 0xb3, 0x97, 0x8c, 0x25, 0x00, 0x84, 0xf3, 0x8c, 0xe4, 0x29, 0x20, - 0x9a, 0x69, 0xa3, 0xb2, 0x64, 0x62, 0x80, 0xa6, 0x62, 0x2c, 0x6d, 0x96, 0x03, 0x65, 0xa0, 0xd0, - 0x86, 0xd6, 0x6e, 0x31, 0xc1, 0xc4, 0x9a, 0x80, 0x57, 0x5f, 0x16, 0xd6, 0xde, 0x4f, 0x85, 0x1e, - 0x0b, 0x3d, 0xb2, 0x05, 0x1b, 0x14, 0xa5, 0x13, 0x1b, 0xe1, 0x72, 0x9c, 0x69, 0x2f, 0x01, 0x43, - 0x7a, 0xf8, 0xd7, 0x34, 0xed, 0xbd, 0xad, 0x81, 0x99, 0x22, 0xb9, 0xb1, 0xf9, 0xe3, 0xb7, 0x8a, - 0xb7, 0x1b, 0x17, 0x0b, 0xdc, 0xd9, 0xba, 0x7f, 0xee, 0xd5, 0x24, 0x27, 0x79, 0xe0, 0x76, 0xdc, - 0x6e, 0xb3, 0x7f, 0x80, 0x0a, 0x5d, 0x09, 0x2c, 0x04, 0xe8, 0x86, 0x93, 0x7c, 0x50, 0x9b, 0x7d, - 0x1d, 0x3a, 0xf1, 0xba, 0xdf, 0xef, 0x7b, 0xf5, 0x54, 0x01, 0x31, 0x42, 0x05, 0x95, 0x8e, 0xdb, - 0x6d, 0x0c, 0x82, 0x8f, 0xf7, 0xa8, 0x55, 0xfc, 0x7d, 0x49, 0xa9, 0x02, 0xad, 0x6f, 0x8d, 0xca, - 0x72, 0x16, 0x97, 0x8d, 0xfe, 0xd0, 0x6b, 0x10, 0x29, 0x95, 0x98, 0x12, 0xae, 0x83, 0x6a, 0xa7, - 0xda, 0x6d, 0xf6, 0x2f, 0xd0, 0x9f, 0x5e, 0x0e, 0x5d, 0xad, 0xd6, 0x8a, 0x37, 0x38, 0xff, 0xde, - 0xab, 0xdb, 0xdb, 0xe8, 0xa0, 0xf6, 0x0f, 0xe4, 0x12, 0x36, 0x80, 0xd9, 0x22, 0x74, 0xe7, 0x8b, - 0xd0, 0xfd, 0x5e, 0x84, 0xee, 0xeb, 0x32, 0x74, 0xe6, 0xcb, 0xd0, 0xf9, 0x5c, 0x86, 0xce, 0xf0, - 0x9a, 0x65, 0xe6, 0x61, 0x92, 0xa0, 0x54, 0x8c, 0xb1, 0x55, 0x45, 0xa5, 0x0b, 0x6f, 0xb9, 0xa2, - 0x8d, 0x2c, 0xb2, 0x36, 0xfc, 0x84, 0xb7, 0x0e, 0x64, 0x9e, 0x25, 0xe8, 0x64, 0x67, 0x7d, 0xa1, - 0xd3, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x5e, 0xb6, 0x31, 0xc0, 0x6c, 0x02, 0x00, 0x00, +var fileDescriptor_9591340410a3709a = []byte{ + // 360 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x91, 0xbf, 0x4a, 0x2b, 0x41, + 0x14, 0xc6, 0x77, 0x93, 0x70, 0x43, 0x36, 0xc5, 0x85, 0x25, 0xc5, 0xde, 0x70, 0xd9, 0x1b, 0x2e, + 0x16, 0x69, 0x76, 0x96, 0x44, 0xb4, 0xb2, 0x49, 0x1a, 0x0b, 0x2d, 0x64, 0x45, 0x8b, 0x34, 0x61, + 0x76, 0xe7, 0x30, 0xae, 
0x4c, 0x76, 0x86, 0x99, 0x49, 0x50, 0x9f, 0xc2, 0x57, 0x11, 0x7c, 0x88, + 0x94, 0xc1, 0xca, 0x4a, 0x24, 0x79, 0x11, 0x49, 0x66, 0x96, 0x18, 0x10, 0x8b, 0x60, 0xb7, 0xe7, + 0xcf, 0xfe, 0xce, 0xf7, 0xcd, 0xe7, 0x9d, 0x3f, 0xe4, 0x34, 0x05, 0xc0, 0x8c, 0xe5, 0xb8, 0xc8, + 0x20, 0x26, 0xb9, 0xd2, 0x32, 0x4f, 0xa7, 0x1a, 0x48, 0xc6, 0x27, 0xc2, 0x74, 0x19, 0x10, 0x0a, + 0x32, 0x26, 0x19, 0x9b, 0x0a, 0x2a, 0x31, 0x81, 0x58, 0xc2, 0x2d, 0x64, 0x1a, 0xc8, 0xd8, 0x36, + 0x90, 0x90, 0x5c, 0x73, 0xff, 0x68, 0x97, 0x86, 0xbe, 0xa1, 0xa1, 0x2d, 0xad, 0xdd, 0xa2, 0x9c, + 0xf2, 0x0d, 0x21, 0x5e, 0x7f, 0x19, 0x58, 0xfb, 0x4f, 0xc6, 0xd5, 0x84, 0xab, 0xb1, 0x19, 0x98, + 0xc2, 0x8e, 0x0e, 0x4c, 0x15, 0x97, 0x72, 0x66, 0xbd, 0x14, 0x34, 0xee, 0xc5, 0x3b, 0x6a, 0xda, + 0x83, 0xfd, 0xbc, 0x51, 0x89, 0x0b, 0x6d, 0x10, 0xff, 0x9f, 0x2a, 0xde, 0xef, 0xc4, 0x7a, 0xbd, + 0x32, 0x73, 0xff, 0xd8, 0xab, 0x09, 0x86, 0x8b, 0xc0, 0xed, 0xb8, 0xdd, 0x66, 0xff, 0x2f, 0xb2, + 0xca, 0xca, 0xdb, 0x56, 0x0b, 0xba, 0x60, 0xb8, 0x18, 0xd6, 0xe6, 0x6f, 0xff, 0x9c, 0x64, 0xb3, + 0xef, 0xf7, 0xbd, 0x7a, 0x26, 0x01, 0x6b, 0x2e, 0x83, 0x4a, 0xc7, 0xed, 0x36, 0x86, 0xc1, 0xcb, + 0x73, 0xd4, 0xb2, 0x7f, 0x0f, 0x08, 0x91, 0xa0, 0xd4, 0xa5, 0x96, 0x79, 0x41, 0x93, 0x72, 0xd1, + 0x1f, 0x79, 0x0d, 0x2c, 0x84, 0xe4, 0x33, 0xcc, 0x54, 0x50, 0xed, 0x54, 0xbb, 0xcd, 0xfe, 0x09, + 0xda, 0xeb, 0x91, 0xd1, 0xe9, 0xda, 0x56, 0xb2, 0xc5, 0xf9, 0xd7, 0x5e, 0xdd, 0xc4, 0xa8, 0x82, + 0xda, 0x0f, 0x90, 0x4b, 0xd8, 0x10, 0xe6, 0xcb, 0xd0, 0x5d, 0x2c, 0x43, 0xf7, 0x7d, 0x19, 0xba, + 0x8f, 0xab, 0xd0, 0x59, 0xac, 0x42, 0xe7, 0x75, 0x15, 0x3a, 0xa3, 0x33, 0x9a, 0xeb, 0x9b, 0x69, + 0x8a, 0x32, 0x3e, 0x89, 0xcd, 0xa9, 0xe8, 0xab, 0x70, 0xa2, 0xed, 0xb1, 0xc8, 0xc6, 0x73, 0xf7, + 0x39, 0x20, 0x7d, 0x2f, 0x40, 0xa5, 0xbf, 0x36, 0x09, 0x1d, 0x7e, 0x04, 0x00, 0x00, 0xff, 0xff, + 0x94, 0x47, 0x47, 0xfc, 0xc2, 0x02, 0x00, 0x00, } func (m *RejectedUpgrade) Marshal() (dAtA []byte, err error) { diff --git a/x/dclupgrade/types/tx.pb.go 
b/x/dclupgrade/types/tx.pb.go index 6b22d80dc..e24a420e2 100644 --- a/x/dclupgrade/types/tx.pb.go +++ b/x/dclupgrade/types/tx.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: dclupgrade/tx.proto +// source: zigbeealliance/distributedcomplianceledger/dclupgrade/tx.proto package types @@ -8,9 +8,9 @@ import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" types "github.com/cosmos/cosmos-sdk/x/upgrade/types" - _ "github.com/gogo/protobuf/gogoproto" - grpc1 "github.com/gogo/protobuf/grpc" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + grpc1 "github.com/cosmos/gogoproto/grpc" + proto "github.com/cosmos/gogoproto/proto" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" status "google.golang.org/grpc/status" @@ -41,7 +41,7 @@ func (m *MsgProposeUpgrade) Reset() { *m = MsgProposeUpgrade{} } func (m *MsgProposeUpgrade) String() string { return proto.CompactTextString(m) } func (*MsgProposeUpgrade) ProtoMessage() {} func (*MsgProposeUpgrade) Descriptor() ([]byte, []int) { - return fileDescriptor_c6618e43a1731fe5, []int{0} + return fileDescriptor_1c32c3f3434423b8, []int{0} } func (m *MsgProposeUpgrade) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -105,7 +105,7 @@ func (m *MsgProposeUpgradeResponse) Reset() { *m = MsgProposeUpgradeResp func (m *MsgProposeUpgradeResponse) String() string { return proto.CompactTextString(m) } func (*MsgProposeUpgradeResponse) ProtoMessage() {} func (*MsgProposeUpgradeResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_c6618e43a1731fe5, []int{1} + return fileDescriptor_1c32c3f3434423b8, []int{1} } func (m *MsgProposeUpgradeResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -145,7 +145,7 @@ func (m *MsgApproveUpgrade) Reset() { *m = MsgApproveUpgrade{} } func (m *MsgApproveUpgrade) String() string { return proto.CompactTextString(m) } func (*MsgApproveUpgrade) ProtoMessage() {} func (*MsgApproveUpgrade) Descriptor() 
([]byte, []int) { - return fileDescriptor_c6618e43a1731fe5, []int{2} + return fileDescriptor_1c32c3f3434423b8, []int{2} } func (m *MsgApproveUpgrade) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -209,7 +209,7 @@ func (m *MsgApproveUpgradeResponse) Reset() { *m = MsgApproveUpgradeResp func (m *MsgApproveUpgradeResponse) String() string { return proto.CompactTextString(m) } func (*MsgApproveUpgradeResponse) ProtoMessage() {} func (*MsgApproveUpgradeResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_c6618e43a1731fe5, []int{3} + return fileDescriptor_1c32c3f3434423b8, []int{3} } func (m *MsgApproveUpgradeResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -249,7 +249,7 @@ func (m *MsgRejectUpgrade) Reset() { *m = MsgRejectUpgrade{} } func (m *MsgRejectUpgrade) String() string { return proto.CompactTextString(m) } func (*MsgRejectUpgrade) ProtoMessage() {} func (*MsgRejectUpgrade) Descriptor() ([]byte, []int) { - return fileDescriptor_c6618e43a1731fe5, []int{4} + return fileDescriptor_1c32c3f3434423b8, []int{4} } func (m *MsgRejectUpgrade) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -313,7 +313,7 @@ func (m *MsgRejectUpgradeResponse) Reset() { *m = MsgRejectUpgradeRespon func (m *MsgRejectUpgradeResponse) String() string { return proto.CompactTextString(m) } func (*MsgRejectUpgradeResponse) ProtoMessage() {} func (*MsgRejectUpgradeResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_c6618e43a1731fe5, []int{5} + return fileDescriptor_1c32c3f3434423b8, []int{5} } func (m *MsgRejectUpgradeResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -351,42 +351,44 @@ func init() { proto.RegisterType((*MsgRejectUpgradeResponse)(nil), "zigbeealliance.distributedcomplianceledger.dclupgrade.MsgRejectUpgradeResponse") } -func init() { proto.RegisterFile("dclupgrade/tx.proto", fileDescriptor_c6618e43a1731fe5) } +func init() { + 
proto.RegisterFile("zigbeealliance/distributedcomplianceledger/dclupgrade/tx.proto", fileDescriptor_1c32c3f3434423b8) +} -var fileDescriptor_c6618e43a1731fe5 = []byte{ - // 506 bytes of a gzipped FileDescriptorProto +var fileDescriptor_1c32c3f3434423b8 = []byte{ + // 510 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x94, 0xc1, 0x8a, 0xd3, 0x40, - 0x18, 0xc7, 0x3b, 0x36, 0x28, 0x8e, 0x28, 0x9a, 0x5d, 0x30, 0x1b, 0x25, 0x5b, 0x82, 0x87, 0xc2, - 0xd2, 0xc4, 0x5d, 0x75, 0x41, 0xc1, 0xc3, 0xf6, 0xa2, 0x22, 0xc5, 0x12, 0xf1, 0xe2, 0x45, 0x26, - 0xc9, 0xe7, 0x38, 0x92, 0x64, 0xe2, 0xcc, 0xb4, 0x54, 0x6f, 0x3e, 0x81, 0xbe, 0x86, 0xe0, 0xd1, - 0x87, 0xd8, 0x83, 0x87, 0xc5, 0x8b, 0x9e, 0x16, 0x69, 0xdf, 0xc0, 0x27, 0x10, 0x33, 0x9d, 0x6d, - 0xd3, 0x7a, 0xda, 0x76, 0x6f, 0xf3, 0xf5, 0xfb, 0xe6, 0xff, 0xff, 0xfd, 0xe9, 0x97, 0xc1, 0x1b, - 0x69, 0x92, 0x0d, 0x4a, 0x2a, 0x48, 0x0a, 0xa1, 0x1a, 0x05, 0xa5, 0xe0, 0x8a, 0xdb, 0xf7, 0x3e, - 0x30, 0x1a, 0x03, 0x90, 0x2c, 0x63, 0xa4, 0x48, 0x20, 0x48, 0x99, 0x54, 0x82, 0xc5, 0x03, 0x05, - 0x69, 0xc2, 0xf3, 0x52, 0xff, 0x9a, 0x41, 0x4a, 0x41, 0x04, 0xb3, 0xfb, 0xee, 0x26, 0xe5, 0x94, - 0x57, 0x0a, 0xe1, 0xbf, 0x93, 0x16, 0x73, 0xb7, 0x12, 0x2e, 0x73, 0x2e, 0x5f, 0xe9, 0x86, 0x2e, - 0xa6, 0xad, 0x5b, 0xba, 0x0a, 0x0d, 0xc0, 0x70, 0x37, 0x06, 0x45, 0x76, 0x4d, 0xad, 0xa7, 0xfc, - 0x9f, 0x08, 0x5f, 0xeb, 0x49, 0xda, 0x17, 0xbc, 0xe4, 0x12, 0x5e, 0xe8, 0x9e, 0xfd, 0x04, 0x5f, - 0x48, 0x04, 0x10, 0xc5, 0x85, 0x83, 0x5a, 0xa8, 0x7d, 0xb1, 0x1b, 0xfe, 0x39, 0xde, 0xde, 0x18, - 0x92, 0x8c, 0xa5, 0x44, 0xc1, 0x03, 0x5f, 0xc0, 0xbb, 0x01, 0x13, 0x90, 0xfa, 0x3f, 0xbe, 0x75, - 0x36, 0xa7, 0xae, 0x07, 0x69, 0x2a, 0x40, 0xca, 0xe7, 0x4a, 0xb0, 0x82, 0x46, 0xe6, 0xbe, 0xbd, - 0x8f, 0xad, 0x32, 0x23, 0x85, 0x73, 0xae, 0x85, 0xda, 0x97, 0xf6, 0x6e, 0x06, 0xd3, 0x69, 0x43, - 0x31, 0xa5, 0x0a, 0xfa, 0x19, 0x29, 0xba, 0xd6, 0xe1, 0xf1, 0x76, 0x23, 0xaa, 0xe6, 0xed, 0x1d, - 0x6c, 0xb1, 0xe2, 
0x35, 0x77, 0x9a, 0x95, 0xff, 0xf5, 0xba, 0x7f, 0x4e, 0x46, 0x0f, 0xef, 0xde, - 0xbe, 0xbf, 0xef, 0x47, 0xd5, 0x90, 0x6d, 0x63, 0x4b, 0xb1, 0x1c, 0x1c, 0xab, 0x85, 0xda, 0xcd, - 0xa8, 0x3a, 0xfb, 0x37, 0xf0, 0xd6, 0x52, 0xb0, 0x08, 0x64, 0xc9, 0x0b, 0x09, 0xfe, 0x77, 0x1d, - 0xfb, 0xa0, 0x2c, 0x05, 0x1f, 0x9e, 0x45, 0xec, 0x1d, 0x6c, 0x15, 0x24, 0x87, 0x2a, 0xf6, 0x12, - 0xfe, 0x89, 0x4e, 0x54, 0x0d, 0xad, 0x2b, 0x6b, 0x3d, 0xcd, 0x49, 0xd6, 0x8f, 0x08, 0x5f, 0xed, - 0x49, 0x1a, 0xc1, 0x5b, 0x48, 0x94, 0x89, 0xea, 0x2c, 0x44, 0x9d, 0x91, 0xdb, 0xf3, 0xe4, 0xeb, - 0x02, 0x74, 0xb1, 0xb3, 0x88, 0x60, 0xf8, 0xf6, 0x3e, 0x59, 0xb8, 0xd9, 0x93, 0xd4, 0xfe, 0x8a, - 0xf0, 0x95, 0x85, 0x3d, 0x7c, 0x1c, 0x9c, 0xea, 0x63, 0x09, 0x96, 0xfe, 0x78, 0xb7, 0xbf, 0x2e, - 0x25, 0x83, 0x5d, 0xe1, 0x2e, 0xec, 0xcf, 0x0a, 0xb8, 0x75, 0xa5, 0x55, 0x70, 0xff, 0xbf, 0x05, - 0xf6, 0x17, 0x84, 0x2f, 0xd7, 0x57, 0xe0, 0xd1, 0xe9, 0x3d, 0x6a, 0x42, 0xee, 0xb3, 0x35, 0x09, - 0x19, 0xd6, 0x2e, 0x1c, 0x8e, 0x3d, 0x74, 0x34, 0xf6, 0xd0, 0xef, 0xb1, 0x87, 0x3e, 0x4f, 0xbc, - 0xc6, 0xd1, 0xc4, 0x6b, 0xfc, 0x9a, 0x78, 0x8d, 0x97, 0x4f, 0x29, 0x53, 0x6f, 0x06, 0x71, 0x90, - 0xf0, 0x3c, 0xd4, 0xa6, 0x1d, 0xe3, 0x1a, 0xce, 0xb9, 0x76, 0x66, 0xb6, 0x1d, 0xed, 0x1b, 0x8e, - 0xc2, 0xf9, 0xc7, 0xf8, 0x7d, 0x09, 0x32, 0x3e, 0x5f, 0x3d, 0x81, 0x77, 0xfe, 0x06, 0x00, 0x00, - 0xff, 0xff, 0x59, 0x25, 0x85, 0x35, 0xa7, 0x05, 0x00, 0x00, + 0x18, 0xc7, 0x3b, 0x36, 0x28, 0x8e, 0x28, 0x1a, 0x17, 0xcc, 0x46, 0xc9, 0x96, 0xe0, 0xa1, 0xb0, + 0x34, 0x71, 0x57, 0x5d, 0x50, 0x50, 0xd8, 0x5e, 0x54, 0xa4, 0x58, 0x22, 0x5e, 0xbc, 0xc8, 0x24, + 0xf9, 0x1c, 0x47, 0x92, 0x4c, 0x9c, 0x99, 0x96, 0xea, 0xcd, 0x27, 0xd0, 0xd7, 0x10, 0x3c, 0xfa, + 0x10, 0x7b, 0xf0, 0xb0, 0x78, 0xd1, 0xd3, 0x22, 0xed, 0x1b, 0xf8, 0x04, 0xe2, 0x4c, 0xb2, 0x35, + 0xed, 0xe2, 0x61, 0xb7, 0xde, 0x66, 0x3a, 0xdf, 0xf7, 0xff, 0xff, 0xfe, 0xf4, 0xcb, 0x87, 0xef, + 0xbf, 0x63, 0x34, 0x06, 0x20, 0x59, 0xc6, 0x48, 0x91, 0x40, 0x98, 0x32, 0xa9, 0x04, 0x8b, 
0x47, + 0x0a, 0xd2, 0x84, 0xe7, 0xa5, 0xf9, 0x35, 0x83, 0x94, 0x82, 0x08, 0xd3, 0x24, 0x1b, 0x95, 0x54, + 0x90, 0x14, 0x42, 0x35, 0x09, 0x4a, 0xc1, 0x15, 0xb7, 0x6f, 0x37, 0xfb, 0x83, 0x7f, 0xf4, 0x07, + 0xf3, 0x7e, 0x77, 0x8d, 0x72, 0xca, 0xb5, 0x42, 0xf8, 0xe7, 0x64, 0xc4, 0xdc, 0xf5, 0x84, 0xcb, + 0x9c, 0xcb, 0x17, 0xe6, 0xc1, 0x5c, 0xaa, 0xa7, 0xeb, 0xe6, 0x16, 0xd6, 0x00, 0xe3, 0xad, 0x18, + 0x14, 0xd9, 0xaa, 0xef, 0xa6, 0xca, 0xff, 0x8e, 0xf0, 0xa5, 0x81, 0xa4, 0x43, 0xc1, 0x4b, 0x2e, + 0xe1, 0x99, 0x79, 0xb3, 0x1f, 0xe1, 0x33, 0x89, 0x00, 0xa2, 0xb8, 0x70, 0x50, 0x07, 0x75, 0xcf, + 0xf6, 0xc3, 0x5f, 0x07, 0x1b, 0x97, 0xc7, 0x24, 0x63, 0x29, 0x51, 0x70, 0xd7, 0x17, 0xf0, 0x66, + 0xc4, 0x04, 0xa4, 0xfe, 0xb7, 0x2f, 0xbd, 0xb5, 0xca, 0x75, 0x37, 0x4d, 0x05, 0x48, 0xf9, 0x54, + 0x09, 0x56, 0xd0, 0xa8, 0xee, 0xb7, 0x77, 0xb0, 0x55, 0x66, 0xa4, 0x70, 0x4e, 0x75, 0x50, 0xf7, + 0xdc, 0xf6, 0xb5, 0xa0, 0xaa, 0xae, 0x29, 0x2a, 0xaa, 0x60, 0x98, 0x91, 0xa2, 0x6f, 0xed, 0x1d, + 0x6c, 0xb4, 0x22, 0x5d, 0x6f, 0x6f, 0x62, 0x8b, 0x15, 0x2f, 0xb9, 0xd3, 0xd6, 0xfe, 0x57, 0x9a, + 0xfe, 0x39, 0x99, 0xdc, 0xbb, 0x75, 0xe3, 0xce, 0x8e, 0x1f, 0xe9, 0x22, 0xdb, 0xc6, 0x96, 0x62, + 0x39, 0x38, 0x56, 0x07, 0x75, 0xdb, 0x91, 0x3e, 0xfb, 0x57, 0xf1, 0xfa, 0x52, 0xb0, 0x08, 0x64, + 0xc9, 0x0b, 0x09, 0xfe, 0x57, 0x13, 0x7b, 0xb7, 0x2c, 0x05, 0x1f, 0xff, 0x8f, 0xd8, 0x9b, 0xd8, + 0x2a, 0x48, 0x0e, 0x3a, 0xf6, 0x12, 0xfe, 0xa1, 0x4e, 0xa4, 0x8b, 0x56, 0x95, 0xb5, 0x99, 0xe6, + 0x30, 0xeb, 0x7b, 0x84, 0x2f, 0x0e, 0x24, 0x8d, 0xe0, 0x35, 0x24, 0xaa, 0x8e, 0xea, 0x2c, 0x44, + 0x9d, 0x93, 0xdb, 0x7f, 0x93, 0xaf, 0x0a, 0xd0, 0xc5, 0xce, 0x22, 0x42, 0xcd, 0xb7, 0xfd, 0xc1, + 0xc2, 0xed, 0x81, 0xa4, 0xf6, 0x67, 0x84, 0x2f, 0x2c, 0xcc, 0xe1, 0xc3, 0xe0, 0x58, 0x1f, 0x4b, + 0xb0, 0xf4, 0xc7, 0xbb, 0xc3, 0x55, 0x29, 0xd5, 0xd8, 0x1a, 0x77, 0x61, 0x7e, 0x4e, 0x80, 0xdb, + 0x54, 0x3a, 0x09, 0xee, 0xd1, 0x53, 0x60, 0x7f, 0x42, 0xf8, 0x7c, 0x73, 0x04, 0x1e, 0x1c, 0xdf, + 0xa3, 0x21, 0xe4, 0x3e, 0x59, 
0x91, 0x50, 0xcd, 0xda, 0x87, 0xbd, 0xa9, 0x87, 0xf6, 0xa7, 0x1e, + 0xfa, 0x39, 0xf5, 0xd0, 0xc7, 0x99, 0xd7, 0xda, 0x9f, 0x79, 0xad, 0x1f, 0x33, 0xaf, 0xf5, 0xfc, + 0x31, 0x65, 0xea, 0xd5, 0x28, 0x0e, 0x12, 0x9e, 0x87, 0xc6, 0xb4, 0x77, 0xd4, 0x22, 0xee, 0xcd, + 0x6d, 0x7b, 0xd5, 0x2a, 0x9e, 0x34, 0x96, 0xf1, 0xdb, 0x12, 0x64, 0x7c, 0x5a, 0xaf, 0xc0, 0x9b, + 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0xf6, 0x4a, 0x2b, 0x58, 0xd2, 0x05, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -538,7 +540,7 @@ var _Msg_serviceDesc = grpc.ServiceDesc{ }, }, Streams: []grpc.StreamDesc{}, - Metadata: "dclupgrade/tx.proto", + Metadata: "zigbeealliance/distributedcomplianceledger/dclupgrade/tx.proto", } func (m *MsgProposeUpgrade) Marshal() (dAtA []byte, err error) { diff --git a/x/model/client/cli/query.go b/x/model/client/cli/query.go index 543a264ec..f7a82076f 100644 --- a/x/model/client/cli/query.go +++ b/x/model/client/cli/query.go @@ -13,7 +13,7 @@ import ( ) // GetQueryCmd returns the cli query commands for this module. 
-func GetQueryCmd(queryRoute string) *cobra.Command { +func GetQueryCmd(_ string) *cobra.Command { // Group model queries under a subcommand cmd := &cobra.Command{ Use: types.ModuleName, diff --git a/x/model/client/cli/query_model.go b/x/model/client/cli/query_model.go index abccf7aec..b0a8547dd 100644 --- a/x/model/client/cli/query_model.go +++ b/x/model/client/cli/query_model.go @@ -61,7 +61,10 @@ func CmdShowModel() *cobra.Command { Short: "Query Model by combination of Vendor ID and Product ID", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } var res types.Model return cli.QueryWithProof( diff --git a/x/model/client/cli/query_model_test.go b/x/model/client/cli/query_model_test.go index aa1688750..20941a766 100644 --- a/x/model/client/cli/query_model_test.go +++ b/x/model/client/cli/query_model_test.go @@ -5,10 +5,10 @@ import ( "strconv" "testing" + tmcli "github.com/cometbft/cometbft/libs/cli" "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/nullify" cliutils "github.com/zigbee-alliance/distributed-compliance-ledger/utils/cli" diff --git a/x/model/client/cli/query_model_version.go b/x/model/client/cli/query_model_version.go index da05926ea..c26fde122 100644 --- a/x/model/client/cli/query_model_version.go +++ b/x/model/client/cli/query_model_version.go @@ -20,7 +20,10 @@ func CmdShowModelVersion() *cobra.Command { Short: "Query Model Version by combination of Vendor ID, Product ID and Software Version", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := 
client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } var res types.ModelVersion return cli.QueryWithProof( diff --git a/x/model/client/cli/query_model_version_test.go b/x/model/client/cli/query_model_version_test.go index 9509dd399..f254a0038 100644 --- a/x/model/client/cli/query_model_version_test.go +++ b/x/model/client/cli/query_model_version_test.go @@ -5,9 +5,9 @@ import ( "strconv" "testing" + tmcli "github.com/cometbft/cometbft/libs/cli" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/nullify" cliutils "github.com/zigbee-alliance/distributed-compliance-ledger/utils/cli" diff --git a/x/model/client/cli/query_model_versions.go b/x/model/client/cli/query_model_versions.go index 5472c059d..76a095ee3 100644 --- a/x/model/client/cli/query_model_versions.go +++ b/x/model/client/cli/query_model_versions.go @@ -19,7 +19,10 @@ func CmdShowModelVersions() *cobra.Command { Short: "Query the list of all versions for a given Device Model", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } var res types.ModelVersions return cli.QueryWithProof( diff --git a/x/model/client/cli/query_model_versions_test.go b/x/model/client/cli/query_model_versions_test.go index 9d3566f40..f8b45971e 100644 --- a/x/model/client/cli/query_model_versions_test.go +++ b/x/model/client/cli/query_model_versions_test.go @@ -5,9 +5,9 @@ import ( "strconv" "testing" + tmcli "github.com/cometbft/cometbft/libs/cli" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli 
"github.com/tendermint/tendermint/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/nullify" cliutils "github.com/zigbee-alliance/distributed-compliance-ledger/utils/cli" diff --git a/x/model/client/cli/query_vendor_products.go b/x/model/client/cli/query_vendor_products.go index 070e91894..921de6b45 100644 --- a/x/model/client/cli/query_vendor_products.go +++ b/x/model/client/cli/query_vendor_products.go @@ -16,7 +16,10 @@ func CmdShowVendorProducts() *cobra.Command { Short: "Query the list of Models for the given Vendor", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } var res types.VendorProducts return cli.QueryWithProof( diff --git a/x/model/client/cli/query_vendor_products_test.go b/x/model/client/cli/query_vendor_products_test.go index 98366be32..2a0399fef 100644 --- a/x/model/client/cli/query_vendor_products_test.go +++ b/x/model/client/cli/query_vendor_products_test.go @@ -5,9 +5,9 @@ import ( "strconv" "testing" + tmcli "github.com/cometbft/cometbft/libs/cli" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/nullify" cliutils "github.com/zigbee-alliance/distributed-compliance-ledger/utils/cli" diff --git a/x/model/client/cli/tx_model_test.go b/x/model/client/cli/tx_model_test.go index 699901bd4..0badf0689 100644 --- a/x/model/client/cli/tx_model_test.go +++ b/x/model/client/cli/tx_model_test.go @@ -6,7 +6,9 @@ import ( "testing" "github.com/cosmos/cosmos-sdk/client/flags" + authtx "github.com/cosmos/cosmos-sdk/x/auth/tx" 
"github.com/stretchr/testify/require" + testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" testcli "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" @@ -41,7 +43,7 @@ func TestCreateModel(t *testing.T) { common := []string{ fmt.Sprintf("--%s=%s", flags.FlagFrom, val.Address.String()), fmt.Sprintf("--%s", flags.FlagSkipConfirmation), - fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastBlock), + fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastSync), } for _, tc := range []struct { @@ -65,9 +67,14 @@ func TestCreateModel(t *testing.T) { } args = append(args, fields...) args = append(args, common...) - _, err := testcli.ExecTestCLITxCmd(t, ctx, cli.CmdCreateModel(), args) + txResp, err := testcli.ExecTestCLITxCmd(t, ctx, cli.CmdCreateModel(), args) + require.NoError(t, err) + err = net.WaitForNextBlock() + require.NoError(t, err) if tc.err != nil { - require.ErrorIs(t, err, tc.err) + resp, err := authtx.QueryTx(ctx, txResp.TxHash) + require.NoError(t, err) + require.Contains(t, resp.RawLog, tc.err.Error()) } else { require.NoError(t, err) } @@ -83,7 +90,7 @@ func TestUpdateModel(t *testing.T) { common := []string{ fmt.Sprintf("--%s=%s", flags.FlagFrom, val.Address.String()), fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), - fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastBlock), + fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastSync), } args := []string{ @@ -107,6 +114,8 @@ func TestUpdateModel(t *testing.T) { args = append(args, common...) 
_, err := testcli.ExecTestCLITxCmd(t, ctx, cli.CmdCreateModel(), args) require.NoError(t, err) + err = net.WaitForNextBlock() + require.NoError(t, err) fields := []string{ fmt.Sprintf("--%s=%v", cli.FlagProductName, testconstants.ProductName) + "-updated", @@ -150,9 +159,13 @@ func TestUpdateModel(t *testing.T) { } args = append(args, fields...) args = append(args, common...) - _, err := testcli.ExecTestCLITxCmd(t, ctx, cli.CmdUpdateModel(), args) + txResp, err := testcli.ExecTestCLITxCmd(t, ctx, cli.CmdUpdateModel(), args) + require.NoError(t, err) + err = net.WaitForNextBlock() if tc.err != nil { - require.ErrorIs(t, err, tc.err) + resp, err := authtx.QueryTx(ctx, txResp.TxHash) + require.NoError(t, err) + require.Contains(t, resp.RawLog, tc.err.Error()) } else { require.NoError(t, err) } @@ -169,7 +182,7 @@ func TestDeleteModel(t *testing.T) { common := []string{ fmt.Sprintf("--%s=%s", flags.FlagFrom, val.Address.String()), fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), - fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastBlock), + fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastSync), } args := []string{ @@ -193,6 +206,8 @@ func TestDeleteModel(t *testing.T) { args = append(args, common...) _, err := testcli.ExecTestCLITxCmd(t, ctx, cli.CmdCreateModel(), args) require.NoError(t, err) + err = net.WaitForNextBlock() + require.NoError(t, err) for _, tc := range []struct { desc string @@ -221,9 +236,13 @@ func TestDeleteModel(t *testing.T) { fmt.Sprintf("--%s=%v", cli.FlagPid, tc.idPid), } args = append(args, common...) 
- _, err := testcli.ExecTestCLITxCmd(t, ctx, cli.CmdDeleteModel(), args) + txResp, err := testcli.ExecTestCLITxCmd(t, ctx, cli.CmdDeleteModel(), args) + waitErr := net.WaitForNextBlock() + require.NoError(t, waitErr) if tc.err != nil { - require.ErrorIs(t, err, tc.err) + resp, err := authtx.QueryTx(ctx, txResp.TxHash) + require.NoError(t, err) + require.Contains(t, resp.RawLog, tc.err.Error()) } else { require.NoError(t, err) } diff --git a/x/model/client/cli/tx_model_version_test.go b/x/model/client/cli/tx_model_version_test.go index 33bb5a467..dab5963df 100644 --- a/x/model/client/cli/tx_model_version_test.go +++ b/x/model/client/cli/tx_model_version_test.go @@ -6,7 +6,9 @@ import ( "testing" "github.com/cosmos/cosmos-sdk/client/flags" + authtx "github.com/cosmos/cosmos-sdk/x/auth/tx" "github.com/stretchr/testify/require" + testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" testcli "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" @@ -59,7 +61,7 @@ func TestCreateModelVersion(t *testing.T) { common := []string{ fmt.Sprintf("--%s=%s", flags.FlagFrom, val.Address.String()), fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), - fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastBlock), + fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastSync), } for _, tc := range []struct { @@ -86,9 +88,13 @@ func TestCreateModelVersion(t *testing.T) { } args = append(args, fields...) args = append(args, common...) 
- _, err := testcli.ExecTestCLITxCmd(t, ctx, cli.CmdCreateModelVersion(), args) + txResp, err := testcli.ExecTestCLITxCmd(t, ctx, cli.CmdCreateModelVersion(), args) + waitErr := net.WaitForNextBlock() + require.NoError(t, waitErr) if tc.err != nil { - require.ErrorIs(t, err, tc.err) + resp, err := authtx.QueryTx(ctx, txResp.TxHash) + require.NoError(t, err) + require.Contains(t, resp.RawLog, tc.err.Error()) } else { require.NoError(t, err) } @@ -104,7 +110,7 @@ func TestUpdateModelVersion(t *testing.T) { common := []string{ fmt.Sprintf("--%s=%s", flags.FlagFrom, val.Address.String()), fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), - fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastBlock), + fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastSync), } args := []string{ @@ -126,6 +132,8 @@ func TestUpdateModelVersion(t *testing.T) { args = append(args, common...) _, err := testcli.ExecTestCLITxCmd(t, ctx, cli.CmdCreateModelVersion(), args) require.NoError(t, err) + err = net.WaitForNextBlock() + require.NoError(t, err) fields := []string{ fmt.Sprintf("--%s=%v", cli.FlagSoftwareVersionValid, !testconstants.SoftwareVersionValid), @@ -166,9 +174,13 @@ func TestUpdateModelVersion(t *testing.T) { } args = append(args, fields...) args = append(args, common...) 
- _, err := testcli.ExecTestCLITxCmd(t, ctx, cli.CmdUpdateModelVersion(), args) + txResp, err := testcli.ExecTestCLITxCmd(t, ctx, cli.CmdUpdateModelVersion(), args) + waitErr := net.WaitForNextBlock() + require.NoError(t, waitErr) if tc.err != nil { - require.ErrorIs(t, err, tc.err) + resp, err := authtx.QueryTx(ctx, txResp.TxHash) + require.NoError(t, err) + require.Contains(t, resp.RawLog, tc.err.Error()) } else { require.NoError(t, err) } diff --git a/x/model/handler.go b/x/model/handler.go index 4c0ad4a9a..8759f074a 100644 --- a/x/model/handler.go +++ b/x/model/handler.go @@ -3,6 +3,7 @@ package model import ( "fmt" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/x/model/keeper" @@ -45,7 +46,7 @@ func NewHandler(k keeper.Keeper) sdk.Handler { default: errMsg := fmt.Sprintf("unrecognized %s message type: %T", types.ModuleName, msg) - return nil, sdkerrors.Wrap(sdkerrors.ErrUnknownRequest, errMsg) + return nil, errors.Wrap(sdkerrors.ErrUnknownRequest, errMsg) } } } diff --git a/x/model/handler_test.go b/x/model/handler_test.go index 43e135553..a28b9d4e5 100644 --- a/x/model/handler_test.go +++ b/x/model/handler_test.go @@ -26,8 +26,8 @@ import ( testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" testkeeper "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/testdata" - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" commontypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/common/types" + dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" dclauthtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" "github.com/zigbee-alliance/distributed-compliance-ledger/x/model/keeper" 
"github.com/zigbee-alliance/distributed-compliance-ledger/x/model/types" @@ -83,7 +83,7 @@ func (m *ComplianceKeeperMock) GetComplianceInfo( return val, args.Bool(0) } -var _ types.ComplianceKeeper = &ComplianceKeeperMock{} +var _ keeper.ComplianceKeeper = &ComplianceKeeperMock{} type TestSetup struct { T *testing.T diff --git a/x/model/keeper/check_model_rights.go b/x/model/keeper/check_model_rights.go index dd09839d1..00baf4ac0 100644 --- a/x/model/keeper/check_model_rights.go +++ b/x/model/keeper/check_model_rights.go @@ -1,6 +1,7 @@ package keeper import ( + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" dclauthtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" @@ -9,17 +10,17 @@ import ( func checkModelRights(ctx sdk.Context, k Keeper, signer sdk.AccAddress, vid int32, pid int32, message string) error { // sender must have Vendor role to add new model if !k.dclauthKeeper.HasRole(ctx, signer, dclauthtypes.Vendor) { - return sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, "%s transaction should be "+ + return errors.Wrapf(sdkerrors.ErrUnauthorized, "%s transaction should be "+ "signed by an account with the %s role", message, dclauthtypes.Vendor) } if !k.dclauthKeeper.HasVendorID(ctx, signer, vid) { - return sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, "%s transaction should be "+ + return errors.Wrapf(sdkerrors.ErrUnauthorized, "%s transaction should be "+ "signed by a vendor account containing the vendorID %v ", message, vid) } if !k.dclauthKeeper.HasRightsToChange(ctx, signer, pid) { - return sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, "%s transaction should be "+ + return errors.Wrapf(sdkerrors.ErrUnauthorized, "%s transaction should be "+ "signed by a vendor account who has rights to modify the productID %v ", message, pid) } diff --git a/x/model/keeper/keeper.go b/x/model/keeper/keeper.go index a6bc2b831..7694cf029 100644 --- a/x/model/keeper/keeper.go +++ 
b/x/model/keeper/keeper.go @@ -3,30 +3,32 @@ package keeper import ( "fmt" + "github.com/cometbft/cometbft/libs/log" "github.com/cosmos/cosmos-sdk/codec" + storetypes "github.com/cosmos/cosmos-sdk/store/types" sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/tendermint/tendermint/libs/log" + compliancetypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" "github.com/zigbee-alliance/distributed-compliance-ledger/x/model/types" ) type ( Keeper struct { cdc codec.BinaryCodec - storeKey sdk.StoreKey - memKey sdk.StoreKey + storeKey storetypes.StoreKey + memKey storetypes.StoreKey dclauthKeeper types.DclauthKeeper - complianceKeeper types.ComplianceKeeper + complianceKeeper ComplianceKeeper } ) func NewKeeper( cdc codec.BinaryCodec, storeKey, - memKey sdk.StoreKey, + memKey storetypes.StoreKey, dclauthKeeper types.DclauthKeeper, - complianceKeeper types.ComplianceKeeper, + complianceKeeper ComplianceKeeper, ) *Keeper { return &Keeper{ cdc: cdc, @@ -38,10 +40,21 @@ func NewKeeper( } } -func (k *Keeper) SetComplianceKeeper(complianceKeeper types.ComplianceKeeper) { +func (k *Keeper) SetComplianceKeeper(complianceKeeper ComplianceKeeper) { k.complianceKeeper = complianceKeeper } func (k Keeper) Logger(ctx sdk.Context) log.Logger { return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName)) } + +type ComplianceKeeper interface { + // Methods imported from compliance should be defined here + GetComplianceInfo( + ctx sdk.Context, + vid int32, + pid int32, + softwareVersion uint32, + certificationType string, + ) (val compliancetypes.ComplianceInfo, found bool) +} diff --git a/x/model/keeper/msg_server_model.go b/x/model/keeper/msg_server_model.go index 8071adb6f..15ae92d4c 100644 --- a/x/model/keeper/msg_server_model.go +++ b/x/model/keeper/msg_server_model.go @@ -3,6 +3,7 @@ package keeper import ( "context" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" 
"github.com/zigbee-alliance/distributed-compliance-ledger/x/model/types" @@ -14,7 +15,7 @@ func (k msgServer) CreateModel(goCtx context.Context, msg *types.MsgCreateModel) // check if signer has enough rights to create model signerAddr, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } if err := checkModelRights(ctx, k.Keeper, signerAddr, msg.Vid, msg.Pid, "MsgCreateModel"); err != nil { return nil, err @@ -85,7 +86,7 @@ func (k msgServer) UpdateModel(goCtx context.Context, msg *types.MsgUpdateModel) // check if signer has enough rights to update model signerAddr, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } if err := checkModelRights(ctx, k.Keeper, signerAddr, msg.Vid, msg.Pid, "MsgUpdateModel"); err != nil { return nil, err @@ -187,7 +188,7 @@ func (k msgServer) DeleteModel(goCtx context.Context, msg *types.MsgDeleteModel) // check if signer has enough rights to delete model signerAddr, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } if err := checkModelRights(ctx, k.Keeper, signerAddr, msg.Vid, msg.Pid, "MsgDeleteModel"); err != nil { diff --git a/x/model/keeper/msg_server_model_version.go b/x/model/keeper/msg_server_model_version.go index dfc2f4c2a..a032c8bd4 100644 --- a/x/model/keeper/msg_server_model_version.go +++ b/x/model/keeper/msg_server_model_version.go @@ -3,9 +3,11 @@ package keeper import ( "context" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors 
"github.com/cosmos/cosmos-sdk/types/errors" - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" + + dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/compliance/types" "github.com/zigbee-alliance/distributed-compliance-ledger/x/model/types" ) @@ -15,7 +17,7 @@ func (k msgServer) CreateModelVersion(goCtx context.Context, msg *types.MsgCreat // check signer has enough rights to create model version signerAddr, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } if err := checkModelRights(ctx, k.Keeper, signerAddr, msg.Vid, msg.Pid, "MsgCreateModelVersion"); err != nil { return nil, err @@ -79,7 +81,7 @@ func (k msgServer) UpdateModelVersion(goCtx context.Context, msg *types.MsgUpdat // check signer has enough rights to update model version signerAddr, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } if err := checkModelRights(ctx, k.Keeper, signerAddr, msg.Vid, msg.Pid, "MsgUpdateModelVersion"); err != nil { return nil, err @@ -148,7 +150,7 @@ func (k msgServer) DeleteModelVersion(goCtx context.Context, msg *types.MsgDelet // check signer has enough rights to delete model version signerAddr, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } if err := checkModelRights(ctx, k.Keeper, signerAddr, msg.Vid, msg.Pid, "MsgDeleteModelVersion"); err != nil { return nil, err diff --git a/x/model/keeper/msg_server_test.go b/x/model/keeper/msg_server_test.go index 
e7a9fe046..d5c7ffd22 100644 --- a/x/model/keeper/msg_server_test.go +++ b/x/model/keeper/msg_server_test.go @@ -10,7 +10,7 @@ import ( "github.com/zigbee-alliance/distributed-compliance-ledger/x/model/types" ) -//nolint:deadcode,unused +//nolint:unused func setupMsgServer(tb testing.TB) (types.MsgServer, context.Context) { tb.Helper() k, ctx := keepertest.ModelKeeper(tb, nil, nil) diff --git a/x/model/module.go b/x/model/module.go index 96deba49d..7b1146385 100644 --- a/x/model/module.go +++ b/x/model/module.go @@ -12,10 +12,10 @@ import ( "github.com/cosmos/cosmos-sdk/types/module" // this line is used by starport scaffolding # 1. + abci "github.com/cometbft/cometbft/abci/types" "github.com/gorilla/mux" "github.com/grpc-ecosystem/grpc-gateway/runtime" "github.com/spf13/cobra" - abci "github.com/tendermint/tendermint/abci/types" "github.com/zigbee-alliance/distributed-compliance-ledger/x/model/client/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/x/model/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/x/model/types" @@ -63,7 +63,7 @@ func (AppModuleBasic) DefaultGenesis(cdc codec.JSONCodec) json.RawMessage { } // ValidateGenesis performs genesis state validation for the capability module. -func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, config client.TxEncodingConfig, bz json.RawMessage) error { +func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, _ client.TxEncodingConfig, bz json.RawMessage) error { var genState types.GenesisState if err := cdc.UnmarshalJSON(bz, &genState); err != nil { return fmt.Errorf("failed to unmarshal %s genesis state: %w", types.ModuleName, err) @@ -73,7 +73,7 @@ func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, config client.TxEncod } // RegisterRESTRoutes registers the capability module's REST service handlers. 
-func (AppModuleBasic) RegisterRESTRoutes(clientCtx client.Context, rtr *mux.Router) { +func (AppModuleBasic) RegisterRESTRoutes(_ client.Context, _ *mux.Router) { } // RegisterGRPCGatewayRoutes registers the gRPC Gateway routes for the module. @@ -114,19 +114,6 @@ func (am AppModule) Name() string { return am.AppModuleBasic.Name() } -// Route returns the capability module's message routing key. -func (am AppModule) Route() sdk.Route { - return sdk.NewRoute(types.RouterKey, NewHandler(am.keeper)) -} - -// QuerierRoute returns the capability module's query routing key. -func (AppModule) QuerierRoute() string { return types.QuerierRoute } - -// LegacyQuerierHandler returns the capability module's Querier. -func (am AppModule) LegacyQuerierHandler(legacyQuerierCdc *codec.LegacyAmino) sdk.Querier { - return nil -} - // RegisterServices registers a GRPC query service to respond to the // module-specific GRPC queries. func (am AppModule) RegisterServices(cfg module.Configurator) { diff --git a/x/model/module_simulation.go b/x/model/module_simulation.go index c19d2f25c..d2374de35 100644 --- a/x/model/module_simulation.go +++ b/x/model/module_simulation.go @@ -6,7 +6,7 @@ import ( "math/rand" "github.com/cosmos/cosmos-sdk/baseapp" - simappparams "github.com/cosmos/cosmos-sdk/simapp/params" + simappparams "cosmossdk.io/simapp/params" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/types/module" simtypes "github.com/cosmos/cosmos-sdk/types/simulation" diff --git a/x/model/simulation/model.go b/x/model/simulation/model.go index 058503321..16a0fb716 100644 --- a/x/model/simulation/model.go +++ b/x/model/simulation/model.go @@ -5,10 +5,11 @@ import ( "strconv" "github.com/cosmos/cosmos-sdk/baseapp" - simappparams "github.com/cosmos/cosmos-sdk/simapp/params" sdk "github.com/cosmos/cosmos-sdk/types" simtypes "github.com/cosmos/cosmos-sdk/types/simulation" "github.com/cosmos/cosmos-sdk/x/simulation" + + appEncoding 
"github.com/zigbee-alliance/distributed-compliance-ledger/app" "github.com/zigbee-alliance/distributed-compliance-ledger/x/model/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/x/model/types" ) @@ -42,7 +43,7 @@ func SimulateMsgCreateModel( txCtx := simulation.OperationInput{ R: r, App: app, - TxGen: simappparams.MakeTestEncodingConfig().TxConfig, + TxGen: appEncoding.MakeEncodingConfig().TxConfig, Cdc: nil, Msg: msg, MsgType: msg.Type(), @@ -92,7 +93,7 @@ func SimulateMsgUpdateModel( txCtx := simulation.OperationInput{ R: r, App: app, - TxGen: simappparams.MakeTestEncodingConfig().TxConfig, + TxGen: appEncoding.MakeEncodingConfig().TxConfig, Cdc: nil, Msg: msg, MsgType: msg.Type(), @@ -142,7 +143,7 @@ func SimulateMsgDeleteModel( txCtx := simulation.OperationInput{ R: r, App: app, - TxGen: simappparams.MakeTestEncodingConfig().TxConfig, + TxGen: appEncoding.MakeEncodingConfig().TxConfig, Cdc: nil, Msg: msg, MsgType: msg.Type(), diff --git a/x/model/simulation/model_version.go b/x/model/simulation/model_version.go index fd517d6e2..a02e5d07f 100644 --- a/x/model/simulation/model_version.go +++ b/x/model/simulation/model_version.go @@ -5,10 +5,10 @@ import ( "strconv" "github.com/cosmos/cosmos-sdk/baseapp" - simappparams "github.com/cosmos/cosmos-sdk/simapp/params" sdk "github.com/cosmos/cosmos-sdk/types" simtypes "github.com/cosmos/cosmos-sdk/types/simulation" "github.com/cosmos/cosmos-sdk/x/simulation" + appEnc "github.com/zigbee-alliance/distributed-compliance-ledger/app" "github.com/zigbee-alliance/distributed-compliance-ledger/x/model/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/x/model/types" ) @@ -43,7 +43,7 @@ func SimulateMsgCreateModelVersion( txCtx := simulation.OperationInput{ R: r, App: app, - TxGen: simappparams.MakeTestEncodingConfig().TxConfig, + TxGen: appEnc.MakeEncodingConfig().TxConfig, Cdc: nil, Msg: msg, MsgType: msg.Type(), @@ -93,7 +93,7 @@ func SimulateMsgUpdateModelVersion( txCtx := 
simulation.OperationInput{ R: r, App: app, - TxGen: simappparams.MakeTestEncodingConfig().TxConfig, + TxGen: appEnc.MakeEncodingConfig().TxConfig, Cdc: nil, Msg: msg, MsgType: msg.Type(), @@ -143,7 +143,7 @@ func SimulateMsgDeleteModelVersion( txCtx := simulation.OperationInput{ R: r, App: app, - TxGen: simappparams.MakeTestEncodingConfig().TxConfig, + TxGen: appEnc.MakeEncodingConfig().TxConfig, Cdc: nil, Msg: msg, MsgType: msg.Type(), diff --git a/x/model/types/codec.go b/x/model/types/codec.go index 96a4d1014..728482c22 100644 --- a/x/model/types/codec.go +++ b/x/model/types/codec.go @@ -30,7 +30,7 @@ func RegisterInterfaces(registry cdctypes.InterfaceRegistry) { ) // this line is used by starport scaffolding # 3 - msgservice.RegisterMsgServiceDesc(registry, &_Msg_serviceDesc) + msgservice.RegisterMsgServiceDesc(registry, &_Msg_serviceDesc) //nolint:nosnakecase } var ModuleCdc = codec.NewProtoCodec(cdctypes.NewInterfaceRegistry()) diff --git a/x/model/types/errors.go b/x/model/types/errors.go index 4676eb5bb..1050f9e1d 100644 --- a/x/model/types/errors.go +++ b/x/model/types/errors.go @@ -1,100 +1,100 @@ package types import ( - sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "cosmossdk.io/errors" ) var ( // Model Error Codes. - ErrModelAlreadyExists = sdkerrors.Register(ModuleName, 501, "model already exists") - ErrModelDoesNotExist = sdkerrors.Register(ModuleName, 502, "model does not exist") - ErrVendorProductsDoNotExist = sdkerrors.Register(ModuleName, 504, "vendor products do not exist") + ErrModelAlreadyExists = errors.Register(ModuleName, 501, "model already exists") + ErrModelDoesNotExist = errors.Register(ModuleName, 502, "model does not exist") + ErrVendorProductsDoNotExist = errors.Register(ModuleName, 504, "vendor products do not exist") // Model Version Error Codes. 
- ErrSoftwareVersionStringInvalid = sdkerrors.Register(ModuleName, 511, "software version string invalid") - ErrFirmwareInformationInvalid = sdkerrors.Register(ModuleName, 512, "firmware digests invalid") - ErrCDVersionNumberInvalid = sdkerrors.Register(ModuleName, 513, "CD version number invalid") - ErrOtaURLInvalid = sdkerrors.Register(ModuleName, 514, "OTA URL invalid") - ErrOtaMissingInformation = sdkerrors.Register(ModuleName, 515, "OTA missing information") - ErrReleaseNotesURLInvalid = sdkerrors.Register(ModuleName, 516, "release notes URL invalid") - ErrModelVersionDoesNotExist = sdkerrors.Register(ModuleName, 517, "model version does not exist") - ErrNoModelVersionsExist = sdkerrors.Register(ModuleName, 518, "no model versions exist") - ErrModelVersionAlreadyExists = sdkerrors.Register(ModuleName, 519, "model version already exists") - ErrOtaURLCannotBeSet = sdkerrors.Register(ModuleName, 520, "OTA URL cannot be set") - ErrMaxSVLessThanMinSV = sdkerrors.Register(ModuleName, 521, "max software version less than min software version") - ErrLsfRevisionIsNotValid = sdkerrors.Register(ModuleName, 522, "LsfRevision should monotonically increase by 1") - ErrLsfRevisionIsNotAllowed = sdkerrors.Register(ModuleName, 523, "LsfRevision is not allowed if LsfURL is not present") - ErrModelVersionDeletionCertified = sdkerrors.Register(ModuleName, 524, "model version has a compliance record and can not be deleted") - ErrModelDeletionCertified = sdkerrors.Register(ModuleName, 525, "model has a model version that has a compliance record and correcponding model can not be deleted") + ErrSoftwareVersionStringInvalid = errors.Register(ModuleName, 511, "software version string invalid") + ErrFirmwareInformationInvalid = errors.Register(ModuleName, 512, "firmware digests invalid") + ErrCDVersionNumberInvalid = errors.Register(ModuleName, 513, "CD version number invalid") + ErrOtaURLInvalid = errors.Register(ModuleName, 514, "OTA URL invalid") + ErrOtaMissingInformation = 
errors.Register(ModuleName, 515, "OTA missing information") + ErrReleaseNotesURLInvalid = errors.Register(ModuleName, 516, "release notes URL invalid") + ErrModelVersionDoesNotExist = errors.Register(ModuleName, 517, "model version does not exist") + ErrNoModelVersionsExist = errors.Register(ModuleName, 518, "no model versions exist") + ErrModelVersionAlreadyExists = errors.Register(ModuleName, 519, "model version already exists") + ErrOtaURLCannotBeSet = errors.Register(ModuleName, 520, "OTA URL cannot be set") + ErrMaxSVLessThanMinSV = errors.Register(ModuleName, 521, "max software version less than min software version") + ErrLsfRevisionIsNotValid = errors.Register(ModuleName, 522, "LsfRevision should monotonically increase by 1") + ErrLsfRevisionIsNotAllowed = errors.Register(ModuleName, 523, "LsfRevision is not allowed if LsfURL is not present") + ErrModelVersionDeletionCertified = errors.Register(ModuleName, 524, "model version has a compliance record and can not be deleted") + ErrModelDeletionCertified = errors.Register(ModuleName, 525, "model has a model version that has a compliance record and correcponding model can not be deleted") ) func NewErrModelAlreadyExists(vid interface{}, pid interface{}) error { - return sdkerrors.Wrapf(ErrModelAlreadyExists, + return errors.Wrapf(ErrModelAlreadyExists, "Model associated with vid=%v and pid=%v already exists on the ledger", vid, pid) } func NewErrModelDoesNotExist(vid interface{}, pid interface{}) error { - return sdkerrors.Wrapf(ErrModelDoesNotExist, + return errors.Wrapf(ErrModelDoesNotExist, "No model associated with vid=%v and pid=%v exist on the ledger", vid, pid) } func NewErrVendorProductsDoNotExist(vid interface{}) error { - return sdkerrors.Wrapf(ErrVendorProductsDoNotExist, + return errors.Wrapf(ErrVendorProductsDoNotExist, "No vendor products associated with vid=%v exist on the ledger", vid) } func NewErrSoftwareVersionStringInvalid(softwareVersion interface{}) error { - return 
sdkerrors.Wrapf(ErrSoftwareVersionStringInvalid, + return errors.Wrapf(ErrSoftwareVersionStringInvalid, "SoftwareVersionString %v is invalid. It should be greater then 1 and less then 64 character long", softwareVersion) } func NewErrFirmwareInformationInvalid(firmwareInformation interface{}) error { - return sdkerrors.Wrapf(ErrFirmwareInformationInvalid, + return errors.Wrapf(ErrFirmwareInformationInvalid, "firmwareInformation %v is of invalid length. Maximum length should be less then 512", firmwareInformation) } func NewErrCDVersionNumberInvalid(cdVersionNumber interface{}) error { - return sdkerrors.Wrapf(ErrCDVersionNumberInvalid, + return errors.Wrapf(ErrCDVersionNumberInvalid, "CDVersionNumber %v is invalid. It should be a 16 bit unsigned integer", cdVersionNumber) } func NewErrOtaURLInvalid(otaURL interface{}) error { - return sdkerrors.Wrapf(ErrOtaURLInvalid, + return errors.Wrapf(ErrOtaURLInvalid, "OtaURL %v is invalid. Maximum length should be less then 256", otaURL) } func NewErrOtaMissingInformation() error { - return sdkerrors.Wrap(ErrOtaMissingInformation, + return errors.Wrap(ErrOtaMissingInformation, "OtaFileSize, OtaChecksum and OtaChecksumType are required if OtaUrl is provided") } func NewErrReleaseNotesURLInvalid(releaseNotesURL interface{}) error { - return sdkerrors.Wrapf(ErrReleaseNotesURLInvalid, + return errors.Wrapf(ErrReleaseNotesURLInvalid, "ReleaseNotesURLInvalid %v is invalid. 
Maximum length should be less then 256", releaseNotesURL) } func NewErrModelVersionDoesNotExist(vid interface{}, pid interface{}, softwareVersion interface{}) error { - return sdkerrors.Wrapf(ErrModelVersionDoesNotExist, + return errors.Wrapf(ErrModelVersionDoesNotExist, "No model version associated with vid=%v, pid=%v and softwareVersion=%v exist on the ledger", vid, pid, softwareVersion) } func NewErrNoModelVersionsExist(vid interface{}, pid interface{}) error { - return sdkerrors.Wrapf(ErrNoModelVersionsExist, + return errors.Wrapf(ErrNoModelVersionsExist, "No versions associated with vid=%v and pid=%v exist on the ledger", vid, pid) } func NewErrModelVersionAlreadyExists(vid interface{}, pid interface{}, softwareVersion interface{}) error { - return sdkerrors.Wrapf(ErrModelVersionAlreadyExists, + return errors.Wrapf(ErrModelVersionAlreadyExists, "Model Version already exists on ledger with vid=%v pid=%v and softwareVersion=%v exist on the ledger", vid, pid, softwareVersion) } @@ -102,32 +102,32 @@ func NewErrModelVersionAlreadyExists(vid interface{}, pid interface{}, softwareV func NewErrMaxSVLessThanMinSV(minApplicableSoftwareVersion interface{}, maxApplicableSoftwareVersion interface{}, ) error { - return sdkerrors.Wrapf(ErrMaxSVLessThanMinSV, + return errors.Wrapf(ErrMaxSVLessThanMinSV, "MaxApplicableSoftwareVersion %v is less than MinApplicableSoftwareVersion %v", maxApplicableSoftwareVersion, minApplicableSoftwareVersion) } func NewErrLsfRevisionIsNotAllowed() error { - return sdkerrors.Wrapf(ErrLsfRevisionIsNotValid, + return errors.Wrapf(ErrLsfRevisionIsNotValid, "LsfRevision is only applicable if LsfURL is not present") } func NewErrLsfRevisionIsNotValid(previousLsfVersion interface{}, currentLsfVersion interface{}, ) error { - return sdkerrors.Wrapf(ErrLsfRevisionIsNotValid, + return errors.Wrapf(ErrLsfRevisionIsNotValid, "LsfRevision %v should be greater then existing lsfRevision %v by 1", currentLsfVersion, previousLsfVersion) } func 
NewErrModelDeletionCertified(vid interface{}, pid interface{}, softwareVersion interface{}) error { - return sdkerrors.Wrapf(ErrModelDeletionCertified, + return errors.Wrapf(ErrModelDeletionCertified, "Model version associated with vid=%v, pid=%v and softwareVersion=%v has a compliance record and the corresponding model can't be deleted", vid, pid, softwareVersion) } func NewErrModelVersionDeletionCertified(vid interface{}, pid interface{}, softwareVersion interface{}) error { - return sdkerrors.Wrapf(ErrModelVersionDeletionCertified, + return errors.Wrapf(ErrModelVersionDeletionCertified, "Model version associated with vid=%v, pid=%v and softwareVersion=%v has a compliance record and can't be deleted", vid, pid, softwareVersion) } diff --git a/x/model/types/expected_keepers.go b/x/model/types/expected_keepers.go index 5e4c0c7c2..17f9f964a 100644 --- a/x/model/types/expected_keepers.go +++ b/x/model/types/expected_keepers.go @@ -2,7 +2,7 @@ package types import ( sdk "github.com/cosmos/cosmos-sdk/types" - dclcompltypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/compliance" + "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" ) @@ -12,14 +12,3 @@ type DclauthKeeper interface { HasVendorID(ctx sdk.Context, addr sdk.AccAddress, vid int32) bool HasRightsToChange(ctx sdk.Context, addr sdk.AccAddress, pid int32) bool } - -type ComplianceKeeper interface { - // Methods imported from compliance should be defined here - GetComplianceInfo( - ctx sdk.Context, - vid int32, - pid int32, - softwareVersion uint32, - certificationType string, - ) (val dclcompltypes.ComplianceInfo, found bool) -} diff --git a/x/model/types/genesis.pb.go b/x/model/types/genesis.pb.go index b9dfc18a8..fc182b684 100644 --- a/x/model/types/genesis.pb.go +++ b/x/model/types/genesis.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: model/genesis.proto +// source: zigbeealliance/distributedcomplianceledger/model/genesis.proto package types import ( fmt "fmt" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -35,7 +35,7 @@ func (m *GenesisState) Reset() { *m = GenesisState{} } func (m *GenesisState) String() string { return proto.CompactTextString(m) } func (*GenesisState) ProtoMessage() {} func (*GenesisState) Descriptor() ([]byte, []int) { - return fileDescriptor_e97faa271237aab5, []int{0} + return fileDescriptor_0e1f6ec318373958, []int{0} } func (m *GenesisState) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -96,31 +96,34 @@ func init() { proto.RegisterType((*GenesisState)(nil), "zigbeealliance.distributedcomplianceledger.model.GenesisState") } -func init() { proto.RegisterFile("model/genesis.proto", fileDescriptor_e97faa271237aab5) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/model/genesis.proto", fileDescriptor_0e1f6ec318373958) +} -var fileDescriptor_e97faa271237aab5 = []byte{ - // 334 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x92, 0x31, 0x4b, 0x33, 0x31, - 0x18, 0xc7, 0xef, 0xde, 0x96, 0x17, 0x8c, 0x0e, 0x36, 0x3a, 0x68, 0x85, 0x28, 0x4e, 0x2e, 0x4d, - 0x44, 0x07, 0x37, 0x95, 0x0e, 0xba, 0x28, 0x88, 0x42, 0x07, 0x1d, 0x6a, 0xef, 0xf2, 0x10, 0x03, - 0xd7, 0xcb, 0x91, 0xa4, 0x45, 0xfd, 0x14, 0x7e, 0xac, 0xe2, 0xd4, 0xd1, 0x49, 0xa4, 0xfd, 0x22, - 0xd2, 0x24, 0xa5, 0x27, 0xed, 0x52, 0x5d, 0x42, 0xf2, 0x7f, 0xc8, 0xef, 0xf7, 0xe4, 0x21, 0x68, - 0xa3, 0xab, 0x38, 0x64, 0x4c, 0x40, 0x0e, 0x46, 0x1a, 0x5a, 0x68, 0x65, 0x15, 0x3e, 0x7c, 0x95, - 0x22, 0x01, 0xe8, 0x64, 0x99, 0xec, 0xe4, 0x29, 0x50, 0x2e, 0x8d, 0xd5, 0x32, 0xe9, 0x59, 0xe0, - 0xa9, 0xea, 0x16, 0x3e, 0xcd, 0x80, 0x0b, 0xd0, 0xd4, 0xdd, 0xaf, 
0xef, 0x78, 0x4c, 0x1f, 0x72, - 0xae, 0x74, 0xbb, 0xd0, 0x8a, 0xf7, 0x52, 0x1b, 0x70, 0xf5, 0x9a, 0x2f, 0xba, 0x35, 0x44, 0xdb, - 0xa5, 0xa8, 0xdd, 0x07, 0x6d, 0xa4, 0xca, 0x43, 0xa9, 0xbe, 0xa0, 0x34, 0x25, 0x6d, 0x0a, 0x25, - 0x94, 0xdb, 0xb2, 0xc9, 0xce, 0xa7, 0xfb, 0xef, 0x15, 0xb4, 0x76, 0xe9, 0x1f, 0x70, 0x67, 0x3b, - 0x16, 0x70, 0x1f, 0x61, 0xdf, 0xc9, 0x4d, 0x68, 0xe4, 0x4a, 0x1a, 0xbb, 0x15, 0xef, 0x55, 0x0e, - 0x56, 0x8f, 0xce, 0xe9, 0xb2, 0x8f, 0xa3, 0xad, 0x1f, 0xac, 0x66, 0x75, 0xf0, 0xb9, 0x1b, 0xdd, - 0x2e, 0x30, 0xe0, 0x07, 0xb4, 0xe2, 0x6e, 0x38, 0xdd, 0x3f, 0xa7, 0x3b, 0x59, 0x5e, 0x77, 0x3d, - 0x59, 0x83, 0x65, 0xc6, 0xc3, 0x05, 0x5a, 0x77, 0x87, 0x96, 0x1f, 0x89, 0x73, 0x54, 0x9c, 0xe3, - 0xf4, 0x97, 0x8e, 0x40, 0x0a, 0xaa, 0x39, 0x3a, 0x36, 0xa8, 0x56, 0xce, 0xfc, 0x14, 0xab, 0x4e, - 0x79, 0xf6, 0x37, 0xe5, 0x74, 0x88, 0xf3, 0xfc, 0xe6, 0xe3, 0x60, 0x44, 0xe2, 0xe1, 0x88, 0xc4, - 0x5f, 0x23, 0x12, 0xbf, 0x8d, 0x49, 0x34, 0x1c, 0x93, 0xe8, 0x63, 0x4c, 0xa2, 0xfb, 0x0b, 0x21, - 0xed, 0x53, 0x2f, 0xa1, 0xa9, 0xea, 0x32, 0x6f, 0x6f, 0x4c, 0xf5, 0xac, 0xa4, 0x6f, 0xcc, 0xfc, - 0x0d, 0xdf, 0x00, 0x7b, 0xf6, 0xbf, 0x89, 0xd9, 0x97, 0x02, 0x4c, 0xf2, 0xdf, 0xfd, 0x9a, 0xe3, - 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x2c, 0x08, 0xde, 0x8c, 0xfb, 0x02, 0x00, 0x00, +var fileDescriptor_0e1f6ec318373958 = []byte{ + // 339 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xb2, 0xab, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0xe7, 0xe6, 0xa7, 0xa4, 0xe6, 0xe8, 0xa7, 0xa7, 0xe6, 0xa5, 0x16, 0x67, 0x16, + 0xeb, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0x19, 0xa0, 0xea, 0xd7, 0xc3, 0xa3, 0x5f, 0x0f, 0xac, + 0x5f, 0xca, 0x8d, 0x64, 0x1b, 0xcb, 0x52, 0xf3, 0x52, 0xf2, 0x8b, 0xe2, 0x0b, 0x8a, 0xf2, 0x53, + 0x4a, 0x93, 0x4b, 
0xa0, 0x36, 0x4b, 0xd9, 0x90, 0x6c, 0x0e, 0x98, 0x84, 0xea, 0x76, 0x21, 0x4f, + 0x77, 0x7c, 0x59, 0x6a, 0x51, 0x71, 0x66, 0x7e, 0x1e, 0xd4, 0x14, 0x57, 0xca, 0x4c, 0x81, 0x79, + 0x45, 0x24, 0x3d, 0x3f, 0x3d, 0x1f, 0xcc, 0xd4, 0x07, 0xb1, 0x20, 0xa2, 0x4a, 0xa7, 0x98, 0xb9, + 0x78, 0xdc, 0x21, 0x81, 0x1d, 0x5c, 0x92, 0x58, 0x92, 0x2a, 0x54, 0xc6, 0x25, 0x04, 0x09, 0x8a, + 0x00, 0x68, 0x48, 0xf8, 0x64, 0x16, 0x97, 0x48, 0x30, 0x2a, 0x30, 0x6b, 0x70, 0x1b, 0x39, 0xe8, + 0x91, 0x1a, 0x11, 0x7a, 0x61, 0x28, 0x66, 0x39, 0xb1, 0x9c, 0xb8, 0x27, 0xcf, 0x10, 0x84, 0xc5, + 0x06, 0xa1, 0x68, 0x2e, 0x4e, 0xb0, 0x0e, 0xb0, 0x75, 0x4c, 0x60, 0xeb, 0xcc, 0x49, 0xb7, 0xce, + 0x17, 0x44, 0x42, 0x6d, 0x41, 0x98, 0x27, 0x54, 0xc0, 0x25, 0x00, 0xe6, 0x84, 0x41, 0x82, 0x04, + 0x6c, 0x07, 0x33, 0xd8, 0x0e, 0x3b, 0x32, 0xed, 0x80, 0x9a, 0x04, 0xb5, 0x0a, 0xc3, 0x74, 0xa1, + 0x62, 0x2e, 0x41, 0x64, 0x31, 0x48, 0x28, 0xb2, 0x80, 0xad, 0xb4, 0xa7, 0xcc, 0x4a, 0x58, 0x20, + 0x62, 0x9a, 0xef, 0x94, 0x70, 0xe2, 0x91, 0x1c, 0xe3, 0x85, 0x47, 0x72, 0x8c, 0x0f, 0x1e, 0xc9, + 0x31, 0x4e, 0x78, 0x2c, 0xc7, 0x70, 0xe1, 0xb1, 0x1c, 0xc3, 0x8d, 0xc7, 0x72, 0x0c, 0x51, 0x6e, + 0xe9, 0x99, 0x25, 0x19, 0xa5, 0x49, 0x7a, 0xc9, 0xf9, 0xb9, 0xfa, 0x10, 0xdb, 0x75, 0xb1, 0xa5, + 0x27, 0x5d, 0x84, 0xfd, 0xba, 0xd0, 0x14, 0x55, 0x01, 0x4d, 0x53, 0x25, 0x95, 0x05, 0xa9, 0xc5, + 0x49, 0x6c, 0xe0, 0x54, 0x63, 0x0c, 0x08, 0x00, 0x00, 0xff, 0xff, 0x77, 0x39, 0x8f, 0x7b, 0xd2, + 0x03, 0x00, 0x00, } func (m *GenesisState) Marshal() (dAtA []byte, err error) { diff --git a/x/model/types/messages_model.go b/x/model/types/messages_model.go index c22a512cf..bc20d1812 100644 --- a/x/model/types/messages_model.go +++ b/x/model/types/messages_model.go @@ -1,8 +1,10 @@ package types import ( + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" ) @@ -84,7 +86,7 @@ func (msg 
*MsgCreateModel) GetSignBytes() []byte { func (msg *MsgCreateModel) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) } err = validator.Validate(msg) @@ -163,7 +165,7 @@ func (msg *MsgUpdateModel) GetSignBytes() []byte { func (msg *MsgUpdateModel) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) } err = validator.Validate(msg) @@ -214,7 +216,7 @@ func (msg *MsgDeleteModel) GetSignBytes() []byte { func (msg *MsgDeleteModel) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) } err = validator.Validate(msg) diff --git a/x/model/types/messages_model_test.go b/x/model/types/messages_model_test.go index 391e63d59..3bd94f287 100644 --- a/x/model/types/messages_model_test.go +++ b/x/model/types/messages_model_test.go @@ -3,15 +3,14 @@ package types import ( "testing" + tmrand "github.com/cometbft/cometbft/libs/rand" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/sample" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" ) -//nolint:goconst func TestMsgCreateModel_ValidateBasic(t *testing.T) { negativeTests := []struct { name string diff --git 
a/x/model/types/messages_model_version.go b/x/model/types/messages_model_version.go index 871adeb7b..af3b337e3 100644 --- a/x/model/types/messages_model_version.go +++ b/x/model/types/messages_model_version.go @@ -1,6 +1,7 @@ package types import ( + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" @@ -78,7 +79,7 @@ func (msg *MsgCreateModelVersion) GetSignBytes() []byte { func (msg *MsgCreateModelVersion) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) } err = validator.Validate(msg) @@ -147,7 +148,7 @@ func (msg *MsgUpdateModelVersion) GetSignBytes() []byte { func (msg *MsgUpdateModelVersion) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) } err = validator.Validate(msg) @@ -195,7 +196,7 @@ func (msg *MsgDeleteModelVersion) GetSignBytes() []byte { func (msg *MsgDeleteModelVersion) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) } err = validator.Validate(msg) diff --git a/x/model/types/messages_model_version_test.go b/x/model/types/messages_model_version_test.go index 00ad64786..a120c28fb 100644 --- a/x/model/types/messages_model_version_test.go +++ b/x/model/types/messages_model_version_test.go @@ -3,9 +3,9 @@ package types import ( "testing" + tmrand 
"github.com/cometbft/cometbft/libs/rand" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/sample" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" diff --git a/x/model/types/model.pb.go b/x/model/types/model.pb.go index dd1007059..81822a632 100644 --- a/x/model/types/model.pb.go +++ b/x/model/types/model.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: model/model.proto +// source: zigbeealliance/distributedcomplianceledger/model/model.proto package types import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -50,7 +50,7 @@ func (m *Model) Reset() { *m = Model{} } func (m *Model) String() string { return proto.CompactTextString(m) } func (*Model) ProtoMessage() {} func (*Model) Descriptor() ([]byte, []int) { - return fileDescriptor_312ac5bcab6cbb43, []int{0} + return fileDescriptor_363bb1b07b19a129, []int{0} } func (m *Model) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -223,45 +223,47 @@ func init() { proto.RegisterType((*Model)(nil), "zigbeealliance.distributedcomplianceledger.model.Model") } -func init() { proto.RegisterFile("model/model.proto", fileDescriptor_312ac5bcab6cbb43) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/model/model.proto", fileDescriptor_363bb1b07b19a129) +} -var fileDescriptor_312ac5bcab6cbb43 = []byte{ - // 551 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xc1, 0x6e, 0x13, 0x31, - 0x14, 0xcc, 0x52, 0x92, 0x50, 0xb7, 0x81, 0xd6, 0x54, 0x60, 0x8a, 0xb4, 0x0a, 0x85, 0x43, 0x84, - 
0x94, 0x2c, 0x82, 0x0b, 0x27, 0x04, 0x45, 0xaa, 0x88, 0xd4, 0x56, 0x68, 0x43, 0x39, 0xf4, 0x00, - 0xec, 0xae, 0x5f, 0x53, 0x4b, 0xf6, 0xda, 0xb2, 0xbd, 0x81, 0xf0, 0x15, 0x7c, 0x0c, 0xff, 0x00, - 0xc7, 0x8a, 0x13, 0x47, 0x94, 0xfc, 0x08, 0xb2, 0x37, 0x21, 0x89, 0x50, 0xd3, 0xf6, 0x12, 0xc5, - 0x33, 0xf3, 0x5e, 0x26, 0xde, 0xd9, 0x41, 0x9b, 0x42, 0x52, 0xe0, 0x91, 0xff, 0xec, 0x28, 0x2d, - 0xad, 0xc4, 0x4f, 0xbe, 0xb2, 0x7e, 0x0a, 0x90, 0x70, 0xce, 0x92, 0x3c, 0x83, 0x0e, 0x65, 0xc6, - 0x6a, 0x96, 0x16, 0x16, 0x68, 0x26, 0x85, 0x2a, 0x51, 0x0e, 0xb4, 0x0f, 0xba, 0xe3, 0xe7, 0xb6, - 0xef, 0x65, 0xd2, 0x08, 0x69, 0x3e, 0xfa, 0xf9, 0xa8, 0x3c, 0x94, 0xcb, 0x76, 0x7e, 0xd4, 0x51, - 0xf5, 0xc0, 0x89, 0xf0, 0x06, 0x5a, 0x19, 0x30, 0x4a, 0x82, 0x66, 0xd0, 0xaa, 0xc6, 0xee, 0xab, - 0x43, 0x14, 0xa3, 0xe4, 0x5a, 0x89, 0x28, 0x46, 0xf1, 0x0e, 0x5a, 0xa7, 0x30, 0x60, 0x19, 0xbc, - 0x1b, 0x2a, 0xe8, 0x52, 0xb2, 0xe2, 0xa9, 0x05, 0x0c, 0x37, 0xd1, 0x9a, 0xd2, 0x92, 0x16, 0x99, - 0x3d, 0x4c, 0x04, 0x90, 0xeb, 0xcd, 0xa0, 0xb5, 0x1a, 0xcf, 0x43, 0x6e, 0xcb, 0xe4, 0xb8, 0x9f, - 0xa4, 0xc0, 0x49, 0xd5, 0x4b, 0x16, 0x30, 0x1c, 0x22, 0xa4, 0x12, 0x6d, 0x0f, 0x0b, 0x91, 0x82, - 0x26, 0x35, 0xaf, 0x98, 0x43, 0xf0, 0x73, 0x74, 0x37, 0x93, 0x42, 0x30, 0x63, 0x98, 0xcc, 0x59, - 0xde, 0x7f, 0x5d, 0x18, 0x2b, 0xc5, 0x1e, 0x97, 0x9f, 0x49, 0xdd, 0x9b, 0x3a, 0x8f, 0xc6, 0x2f, - 0xd0, 0xf6, 0x39, 0xd4, 0x91, 0xe6, 0xe4, 0x86, 0xff, 0xa5, 0x25, 0x0a, 0xbc, 0x8f, 0x1e, 0x2c, - 0xb0, 0xee, 0xf6, 0xba, 0x39, 0xb3, 0x2c, 0xe1, 0x3d, 0x0b, 0xca, 0xbc, 0x61, 0xb9, 0x25, 0xab, - 0xcd, 0xa0, 0xd5, 0x88, 0x2f, 0x16, 0xe2, 0x63, 0xd4, 0x5a, 0x2a, 0xea, 0xe6, 0xc6, 0xea, 0x22, - 0xb3, 0x4c, 0xe6, 0x04, 0x79, 0x6f, 0x97, 0xd6, 0xe3, 0xb7, 0xe8, 0xe1, 0x7f, 0xda, 0x1e, 0x64, - 0x32, 0xa7, 0x89, 0x1e, 0xce, 0xbc, 0xae, 0x79, 0xaf, 0x97, 0x91, 0xe2, 0x0f, 0xe8, 0xf1, 0x05, - 0xb2, 0x79, 0xbf, 0xeb, 0xde, 0xef, 0x15, 0x26, 0xf0, 0x23, 0xd4, 0x28, 0x0c, 0xe8, 0x83, 0x24, - 0x2f, 0x12, 0xee, 0x1e, 0x47, 0xc3, 
0xaf, 0x58, 0x04, 0x5d, 0x36, 0x4c, 0xa1, 0x94, 0xd4, 0xd6, - 0x49, 0x6e, 0x96, 0xd9, 0x98, 0x21, 0x3e, 0x3b, 0x65, 0x96, 0x1c, 0x7f, 0x6b, 0x92, 0x9d, 0x7f, - 0x08, 0xbe, 0x83, 0x6a, 0xdc, 0x9c, 0x38, 0x6e, 0xc3, 0x73, 0x93, 0x93, 0x4b, 0x2e, 0x37, 0x27, - 0x31, 0x0c, 0x98, 0x33, 0x4b, 0x36, 0x7d, 0x8e, 0xe6, 0x21, 0xfc, 0x14, 0xd5, 0x33, 0x0d, 0x89, - 0x95, 0x9a, 0x60, 0x37, 0xba, 0x4b, 0x7e, 0x7d, 0x6f, 0x6f, 0x4d, 0x5e, 0xa8, 0x57, 0x94, 0x6a, - 0x30, 0xa6, 0x67, 0x35, 0xcb, 0xfb, 0xf1, 0x54, 0xe8, 0xfe, 0x93, 0xc9, 0x4e, 0x41, 0x24, 0xef, - 0x41, 0xfb, 0xbd, 0xb7, 0xfd, 0x7d, 0x2f, 0x82, 0xf8, 0x25, 0xba, 0x3f, 0xbb, 0x27, 0xd0, 0x31, - 0x08, 0x69, 0xe1, 0x88, 0x4d, 0x63, 0xb9, 0xe5, 0x8d, 0x2e, 0x93, 0xec, 0x7e, 0xfa, 0x39, 0x0a, - 0x83, 0xb3, 0x51, 0x18, 0xfc, 0x19, 0x85, 0xc1, 0xb7, 0x71, 0x58, 0x39, 0x1b, 0x87, 0x95, 0xdf, - 0xe3, 0xb0, 0x72, 0xbc, 0xd7, 0x67, 0xf6, 0xb4, 0x48, 0x3b, 0x99, 0x14, 0x51, 0xd9, 0x1d, 0xed, - 0x69, 0x79, 0x44, 0x73, 0xe5, 0xd1, 0x9e, 0xb5, 0x47, 0xbb, 0xac, 0x8f, 0xe8, 0x4b, 0x59, 0x3c, - 0x91, 0x1d, 0x2a, 0x30, 0x69, 0xcd, 0x57, 0xc6, 0xb3, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x87, - 0xf2, 0xba, 0x9d, 0x94, 0x04, 0x00, 0x00, +var fileDescriptor_363bb1b07b19a129 = []byte{ + // 553 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0x41, 0x6f, 0xd3, 0x30, + 0x14, 0x5e, 0x18, 0x6d, 0x99, 0xb7, 0xc2, 0x30, 0x13, 0x98, 0x21, 0x45, 0x65, 0x70, 0xa8, 0x90, + 0xda, 0x22, 0xb8, 0x70, 0x40, 0x08, 0x86, 0x34, 0x51, 0x69, 0x9b, 0x50, 0xca, 0x38, 0xec, 0x00, + 0x24, 0xf1, 0x5b, 0x67, 0xc9, 0x8e, 0x2d, 0xdb, 0x29, 0x94, 0x5f, 0xc1, 0x8f, 0xe1, 0x3f, 0xc0, + 0x71, 0xe2, 0xc4, 0x11, 0xb5, 0x7f, 0x04, 0xd9, 0xc9, 0x68, 0x23, 0x58, 0x37, 0x2e, 0x51, 0xfc, + 0x7d, 0xdf, 0x7b, 0xf9, 0xf2, 0xf2, 0xe5, 0xa1, 0xa7, 0x9f, 0xd9, 0x30, 0x01, 0x88, 0x39, 0x67, + 0x71, 0x96, 0x42, 0x8f, 0x32, 0x63, 0x35, 0x4b, 0x72, 0x0b, 0x34, 0x95, 0x42, 0x15, 0x28, 0x07, + 0x3a, 0x04, 0xdd, 0x13, 0x92, 
0x02, 0x2f, 0xae, 0x5d, 0xa5, 0xa5, 0x95, 0xf8, 0x61, 0xb5, 0xba, + 0xbb, 0xa0, 0xba, 0xeb, 0xeb, 0x36, 0x6f, 0xa7, 0xd2, 0x08, 0x69, 0xde, 0xfb, 0xfa, 0x5e, 0x71, + 0x28, 0x9a, 0x6d, 0x7d, 0x6b, 0xa0, 0xda, 0x9e, 0x13, 0xe1, 0x75, 0xb4, 0x3c, 0x62, 0x94, 0x04, + 0xad, 0xa0, 0x5d, 0x8b, 0xdc, 0xad, 0x43, 0x14, 0xa3, 0xe4, 0x52, 0x81, 0x28, 0x46, 0xf1, 0x16, + 0x5a, 0xa3, 0x30, 0x62, 0x29, 0xbc, 0x19, 0x2b, 0xe8, 0x53, 0xb2, 0xec, 0xa9, 0x0a, 0x86, 0x5b, + 0x68, 0x55, 0x69, 0x49, 0xf3, 0xd4, 0xee, 0xc7, 0x02, 0xc8, 0xe5, 0x56, 0xd0, 0x5e, 0x89, 0xe6, + 0x21, 0xd7, 0xa5, 0x3c, 0xee, 0xc6, 0x09, 0x70, 0x52, 0xf3, 0x92, 0x0a, 0x86, 0x43, 0x84, 0x54, + 0xac, 0xed, 0x7e, 0x2e, 0x12, 0xd0, 0xa4, 0xee, 0x15, 0x73, 0x08, 0x7e, 0x82, 0x6e, 0xa5, 0x52, + 0x08, 0x66, 0x0c, 0x93, 0x19, 0xcb, 0x86, 0x2f, 0x73, 0x63, 0xa5, 0xd8, 0xe1, 0xf2, 0x23, 0x69, + 0x78, 0x53, 0x67, 0xd1, 0xf8, 0x19, 0xda, 0x3c, 0x83, 0x3a, 0xd0, 0x9c, 0x5c, 0xf1, 0x4f, 0x5a, + 0xa0, 0xc0, 0xbb, 0xe8, 0x6e, 0x85, 0x75, 0xd3, 0xeb, 0x67, 0xcc, 0xb2, 0x98, 0x0f, 0x2c, 0x28, + 0xf3, 0x8a, 0x65, 0x96, 0xac, 0xb4, 0x82, 0x76, 0x33, 0x3a, 0x5f, 0x88, 0x0f, 0x51, 0x7b, 0xa1, + 0xa8, 0x9f, 0x19, 0xab, 0xf3, 0xd4, 0x32, 0x99, 0x11, 0xe4, 0xbd, 0x5d, 0x58, 0x8f, 0x5f, 0xa3, + 0x7b, 0x7f, 0x69, 0x07, 0x90, 0xca, 0x8c, 0xc6, 0x7a, 0x3c, 0xf3, 0xba, 0xea, 0xbd, 0x5e, 0x44, + 0x8a, 0xdf, 0xa1, 0x07, 0xe7, 0xc8, 0xe6, 0xfd, 0xae, 0x79, 0xbf, 0xff, 0x51, 0x81, 0xef, 0xa3, + 0x66, 0x6e, 0x40, 0xef, 0xc5, 0x59, 0x1e, 0x73, 0xf7, 0x39, 0x9a, 0xbe, 0x45, 0x15, 0x74, 0xd9, + 0x30, 0xb9, 0x52, 0x52, 0x5b, 0x27, 0xb9, 0x5a, 0x64, 0x63, 0x86, 0xf8, 0xec, 0x14, 0x59, 0x72, + 0xfc, 0xb5, 0x32, 0x3b, 0x7f, 0x10, 0x7c, 0x13, 0xd5, 0xb9, 0x39, 0x72, 0xdc, 0xba, 0xe7, 0xca, + 0x93, 0x4b, 0x2e, 0x37, 0x47, 0x11, 0x8c, 0x98, 0x33, 0x4b, 0xae, 0xfb, 0x1c, 0xcd, 0x43, 0xf8, + 0x11, 0x6a, 0xa4, 0x1a, 0x62, 0x2b, 0x35, 0xc1, 0xae, 0x74, 0x9b, 0xfc, 0xf8, 0xda, 0xd9, 0x28, + 0x7f, 0xa8, 0x17, 0x94, 0x6a, 0x30, 0x66, 0x60, 0x35, 0xcb, 0x86, 
0xd1, 0xa9, 0xd0, 0xbd, 0x93, + 0x49, 0x8f, 0x41, 0xc4, 0x6f, 0x41, 0xfb, 0xbe, 0x37, 0xfc, 0xbc, 0xab, 0x20, 0x7e, 0x8e, 0xee, + 0xcc, 0xe6, 0x04, 0x3a, 0x02, 0x21, 0x2d, 0x1c, 0xb0, 0xd3, 0x58, 0x6e, 0x78, 0xa3, 0x8b, 0x24, + 0xdb, 0x1f, 0xbe, 0x4f, 0xc2, 0xe0, 0x64, 0x12, 0x06, 0xbf, 0x26, 0x61, 0xf0, 0x65, 0x1a, 0x2e, + 0x9d, 0x4c, 0xc3, 0xa5, 0x9f, 0xd3, 0x70, 0xe9, 0x70, 0x67, 0xc8, 0xec, 0x71, 0x9e, 0x74, 0x53, + 0x29, 0x7a, 0xc5, 0xee, 0xe8, 0xfc, 0x6b, 0xf5, 0x74, 0x66, 0xdb, 0xa3, 0x53, 0x2e, 0x9f, 0x4f, + 0xe5, 0xfa, 0xb1, 0x63, 0x05, 0x26, 0xa9, 0xfb, 0x95, 0xf1, 0xf8, 0x77, 0x00, 0x00, 0x00, 0xff, + 0xff, 0x49, 0xe5, 0x97, 0x8a, 0xbf, 0x04, 0x00, 0x00, } func (m *Model) Marshal() (dAtA []byte, err error) { diff --git a/x/model/types/model_version.pb.go b/x/model/types/model_version.pb.go index 8b5f3d70f..80bf09fa6 100644 --- a/x/model/types/model_version.pb.go +++ b/x/model/types/model_version.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: model/model_version.proto +// source: zigbeealliance/distributedcomplianceledger/model/model_version.proto package types import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -46,7 +46,7 @@ func (m *ModelVersion) Reset() { *m = ModelVersion{} } func (m *ModelVersion) String() string { return proto.CompactTextString(m) } func (*ModelVersion) ProtoMessage() {} func (*ModelVersion) Descriptor() ([]byte, []int) { - return fileDescriptor_a0324d25600a9825, []int{0} + return fileDescriptor_2a6cd95628709428, []int{0} } func (m *ModelVersion) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -191,40 +191,42 @@ func init() { proto.RegisterType((*ModelVersion)(nil), "zigbeealliance.distributedcomplianceledger.model.ModelVersion") } -func init() { proto.RegisterFile("model/model_version.proto", fileDescriptor_a0324d25600a9825) } +func init() { + 
proto.RegisterFile("zigbeealliance/distributedcomplianceledger/model/model_version.proto", fileDescriptor_2a6cd95628709428) +} -var fileDescriptor_a0324d25600a9825 = []byte{ - // 478 bytes of a gzipped FileDescriptorProto +var fileDescriptor_2a6cd95628709428 = []byte{ + // 479 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x93, 0xc1, 0x6e, 0xd3, 0x30, - 0x18, 0xc7, 0x1b, 0xb6, 0x76, 0x9b, 0xb7, 0xb2, 0xc9, 0x0c, 0xe4, 0x21, 0x14, 0x45, 0x88, 0x43, - 0x2e, 0x6d, 0xa6, 0xc1, 0x0b, 0xac, 0x48, 0x93, 0x38, 0xb0, 0x43, 0x0a, 0x3b, 0x70, 0x19, 0x4e, - 0xfc, 0xb5, 0xb5, 0xb0, 0xeb, 0xc8, 0x76, 0xc7, 0xb6, 0xa7, 0xe0, 0x61, 0xf6, 0x10, 0x1c, 0x27, - 0x4e, 0x1c, 0x51, 0xfb, 0x22, 0xc8, 0x76, 0x2a, 0x4a, 0xd4, 0xed, 0x12, 0xc5, 0x3f, 0xff, 0xbf, - 0xbf, 0x3f, 0xe7, 0xcb, 0x1f, 0x1d, 0x49, 0xc5, 0x40, 0x64, 0xfe, 0x79, 0x79, 0x05, 0xda, 0x70, - 0x35, 0xed, 0x57, 0x5a, 0x59, 0x85, 0x8f, 0x6f, 0xf9, 0xb8, 0x00, 0xa0, 0x42, 0x70, 0x3a, 0x2d, - 0xa1, 0xcf, 0xb8, 0xb1, 0x9a, 0x17, 0x33, 0x0b, 0xac, 0x54, 0xb2, 0x0a, 0x54, 0x00, 0x1b, 0x83, - 0xee, 0xfb, 0xfa, 0x97, 0x47, 0xa5, 0x32, 0x52, 0x99, 0x4b, 0x5f, 0x9f, 0x85, 0x45, 0x30, 0x7b, - 0x7d, 0xd7, 0x46, 0x7b, 0x1f, 0x9d, 0xe8, 0x22, 0x9c, 0x81, 0x0f, 0xd0, 0xc6, 0x15, 0x67, 0x24, - 0x4a, 0xa2, 0xb4, 0x9d, 0xbb, 0x57, 0x47, 0x2a, 0xce, 0xc8, 0x93, 0x40, 0x2a, 0xce, 0x70, 0x8a, - 0xf6, 0x8d, 0x1a, 0xd9, 0xef, 0x54, 0x43, 0x5d, 0x46, 0x36, 0x92, 0x28, 0xed, 0xe6, 0x4d, 0x8c, - 0xdf, 0xa1, 0xe7, 0x0d, 0x34, 0xb4, 0x9a, 0x4f, 0xc7, 0x64, 0x33, 0x89, 0xd2, 0x9d, 0x7c, 0xfd, - 0xa6, 0xf3, 0x2f, 0x59, 0x8d, 0xce, 0x67, 0xb2, 0x00, 0x4d, 0xda, 0xfe, 0xf4, 0x26, 0xc6, 0xc7, - 0xe8, 0xd9, 0x88, 0x6b, 0xe9, 0x2c, 0x3e, 0x4c, 0x47, 0x4a, 0x4b, 0x6a, 0x5d, 0x37, 0x1d, 0xef, - 0xbe, 0x6e, 0x0b, 0x9f, 0xa0, 0xc3, 0xc6, 0xa1, 0x17, 0x54, 0x70, 0x46, 0xb6, 0x92, 0x28, 0xdd, - 0xce, 0xd7, 0xee, 0xe1, 0x17, 0xa8, 0xa3, 0x2c, 0xfd, 0xac, 0x05, 0xd9, 0xf6, 0xc6, 0xf5, 0x0a, - 0x27, 0x68, 
0x57, 0x59, 0x7a, 0xc6, 0x05, 0x0c, 0xf9, 0x2d, 0x90, 0x9d, 0x24, 0x4a, 0x37, 0xf3, - 0x55, 0x54, 0x2b, 0xde, 0x4f, 0xa0, 0xfc, 0x66, 0x66, 0x92, 0x20, 0x5f, 0xbe, 0x8a, 0xdc, 0x5d, - 0x57, 0x96, 0x9f, 0x6e, 0x2a, 0x20, 0xbb, 0xe1, 0xae, 0x0d, 0x8c, 0x07, 0xe8, 0x95, 0xe4, 0xd3, - 0xd3, 0xaa, 0x12, 0xbc, 0xa4, 0x85, 0x80, 0x61, 0x63, 0x04, 0x7b, 0x7e, 0x04, 0x8f, 0x6a, 0xbc, - 0x07, 0xbd, 0x7e, 0xd8, 0xa3, 0x5b, 0x7b, 0x3c, 0xa2, 0x71, 0x1d, 0x6b, 0x10, 0x40, 0x0d, 0x9c, - 0x2b, 0x0b, 0xc6, 0x7d, 0x96, 0xa7, 0xfe, 0x5e, 0x4d, 0x8c, 0x4f, 0xd0, 0x56, 0xa9, 0x81, 0x5a, - 0xa5, 0xc9, 0xbe, 0x53, 0x0c, 0xc8, 0xaf, 0xbb, 0xde, 0x61, 0xfd, 0xff, 0x9d, 0x32, 0xa6, 0xc1, - 0x98, 0x30, 0xf2, 0x7c, 0x29, 0xc4, 0x6f, 0x50, 0xd7, 0x94, 0x13, 0x90, 0x74, 0xd9, 0xd2, 0x81, - 0x6f, 0xe9, 0x7f, 0x38, 0xf8, 0xfa, 0x73, 0x1e, 0x47, 0xf7, 0xf3, 0x38, 0xfa, 0x33, 0x8f, 0xa3, - 0x1f, 0x8b, 0xb8, 0x75, 0xbf, 0x88, 0x5b, 0xbf, 0x17, 0x71, 0xeb, 0xcb, 0xd9, 0x98, 0xdb, 0xc9, - 0xac, 0xe8, 0x97, 0x4a, 0x66, 0x21, 0x28, 0xbd, 0x65, 0x52, 0xb2, 0x95, 0xa4, 0xf4, 0xfe, 0x45, - 0xa5, 0x17, 0xb2, 0x92, 0x5d, 0x87, 0xb4, 0x65, 0xf6, 0xa6, 0x02, 0x53, 0x74, 0x7c, 0x3e, 0xde, - 0xfe, 0x0d, 0x00, 0x00, 0xff, 0xff, 0x17, 0x6e, 0xf7, 0x8b, 0x89, 0x03, 0x00, 0x00, + 0x18, 0xc7, 0x1b, 0xb6, 0x76, 0x9b, 0xb7, 0xb2, 0xc9, 0x0c, 0x64, 0x10, 0x8a, 0x22, 0xc4, 0x21, + 0x97, 0xb6, 0xd3, 0xe0, 0x05, 0x56, 0xd0, 0x24, 0x0e, 0xec, 0x90, 0xc2, 0x0e, 0x5c, 0x86, 0x13, + 0x7f, 0x6d, 0x2d, 0xec, 0x38, 0xb2, 0xdd, 0xb1, 0xed, 0x29, 0x78, 0x98, 0x3d, 0x04, 0xc7, 0x89, + 0x13, 0x47, 0xd4, 0xbe, 0x08, 0xb2, 0x9d, 0x89, 0x2e, 0x2a, 0xbd, 0x44, 0xf1, 0xcf, 0xff, 0xef, + 0xef, 0xcf, 0xf9, 0xf2, 0x47, 0xef, 0x6f, 0xf8, 0x24, 0x07, 0xa0, 0x42, 0x70, 0x5a, 0x16, 0x30, + 0x60, 0xdc, 0x58, 0xcd, 0xf3, 0x99, 0x05, 0x56, 0x28, 0x59, 0x05, 0x2a, 0x80, 0x4d, 0x40, 0x0f, + 0xa4, 0x62, 0x20, 0xc2, 0xf3, 0xe2, 0x12, 0xb4, 0xe1, 0xaa, 0xec, 0x57, 0x5a, 0x59, 0x85, 0x8f, + 0x1e, 0xba, 0xf4, 0xd7, 0xb8, 0xf4, 0x7d, 0xfd, 0x8b, 0xe7, 
0x85, 0x32, 0x52, 0x99, 0x0b, 0x5f, + 0x3f, 0x08, 0x8b, 0x60, 0xf6, 0xea, 0xb6, 0x8d, 0xf6, 0x3e, 0x3a, 0xd1, 0x79, 0x38, 0x03, 0x1f, + 0xa0, 0x8d, 0x4b, 0xce, 0x48, 0x94, 0x44, 0x69, 0x3b, 0x73, 0xaf, 0x8e, 0x54, 0x9c, 0x91, 0x47, + 0x81, 0x54, 0x9c, 0xe1, 0x14, 0xed, 0x1b, 0x35, 0xb6, 0xdf, 0xa9, 0x86, 0xba, 0x8c, 0x6c, 0x24, + 0x51, 0xda, 0xcd, 0x9a, 0x18, 0xbf, 0x45, 0x4f, 0x1b, 0x68, 0x64, 0x35, 0x2f, 0x27, 0x64, 0x33, + 0x89, 0xd2, 0x9d, 0x6c, 0xf5, 0xa6, 0xf3, 0x2f, 0x58, 0x8d, 0xce, 0x66, 0x32, 0x07, 0x4d, 0xda, + 0xfe, 0xf4, 0x26, 0xc6, 0x47, 0xe8, 0xc9, 0x98, 0x6b, 0xe9, 0x2c, 0x3e, 0x94, 0x63, 0xa5, 0x25, + 0xb5, 0xae, 0x9b, 0x8e, 0x77, 0x5f, 0xb5, 0x85, 0x8f, 0xd1, 0x61, 0xe3, 0xd0, 0x73, 0x2a, 0x38, + 0x23, 0x5b, 0x49, 0x94, 0x6e, 0x67, 0x2b, 0xf7, 0xf0, 0x33, 0xd4, 0x51, 0x96, 0x7e, 0xd6, 0x82, + 0x6c, 0x7b, 0xe3, 0x7a, 0x85, 0x13, 0xb4, 0xab, 0x2c, 0x3d, 0xe5, 0x02, 0x46, 0xfc, 0x06, 0xc8, + 0x4e, 0x12, 0xa5, 0x9b, 0xd9, 0x32, 0xaa, 0x15, 0xef, 0xa6, 0x50, 0x7c, 0x33, 0x33, 0x49, 0x90, + 0x2f, 0x5f, 0x46, 0xee, 0xae, 0x4b, 0xcb, 0x4f, 0xd7, 0x15, 0x90, 0xdd, 0x70, 0xd7, 0x06, 0xc6, + 0x43, 0xf4, 0x52, 0xf2, 0xf2, 0xa4, 0xaa, 0x04, 0x2f, 0x68, 0x2e, 0x60, 0xd4, 0x18, 0xc1, 0x9e, + 0x1f, 0xc1, 0x5a, 0x8d, 0xf7, 0xa0, 0x57, 0xff, 0xf7, 0xe8, 0xd6, 0x1e, 0x6b, 0x34, 0xae, 0x63, + 0x0d, 0x02, 0xa8, 0x81, 0x33, 0x65, 0xc1, 0xb8, 0xcf, 0xf2, 0xd8, 0xdf, 0xab, 0x89, 0xf1, 0x31, + 0xda, 0x2a, 0x34, 0x50, 0xab, 0x34, 0xd9, 0x77, 0x8a, 0x21, 0xf9, 0x75, 0xdb, 0x3b, 0xac, 0xff, + 0xbf, 0x13, 0xc6, 0x34, 0x18, 0x13, 0x46, 0x9e, 0xdd, 0x0b, 0xf1, 0x6b, 0xd4, 0x35, 0xc5, 0x14, + 0x24, 0xbd, 0x6f, 0xe9, 0xc0, 0xb7, 0xf4, 0x10, 0x0e, 0xbf, 0xfe, 0x9c, 0xc7, 0xd1, 0xdd, 0x3c, + 0x8e, 0xfe, 0xcc, 0xe3, 0xe8, 0xc7, 0x22, 0x6e, 0xdd, 0x2d, 0xe2, 0xd6, 0xef, 0x45, 0xdc, 0xfa, + 0x72, 0x3a, 0xe1, 0x76, 0x3a, 0xcb, 0xfb, 0x85, 0x92, 0x83, 0x10, 0x94, 0xde, 0xaa, 0xbc, 0xf5, + 0xfe, 0x45, 0xa5, 0x57, 0x27, 0xee, 0xaa, 0xce, 0x9c, 0xbd, 0xae, 0xc0, 0xe4, 0x1d, 0x9f, 0x8f, + 
0x37, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0x3e, 0x2f, 0xce, 0x6e, 0xb4, 0x03, 0x00, 0x00, } func (m *ModelVersion) Marshal() (dAtA []byte, err error) { diff --git a/x/model/types/model_versions.pb.go b/x/model/types/model_versions.pb.go index 2da160614..e0f41bb99 100644 --- a/x/model/types/model_versions.pb.go +++ b/x/model/types/model_versions.pb.go @@ -1,11 +1,11 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: model/model_versions.proto +// source: zigbeealliance/distributedcomplianceledger/model/model_versions.proto package types import ( fmt "fmt" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -32,7 +32,7 @@ func (m *ModelVersions) Reset() { *m = ModelVersions{} } func (m *ModelVersions) String() string { return proto.CompactTextString(m) } func (*ModelVersions) ProtoMessage() {} func (*ModelVersions) Descriptor() ([]byte, []int) { - return fileDescriptor_f2ffc766b6a650e7, []int{0} + return fileDescriptor_eb2b229077d76c14, []int{0} } func (m *ModelVersions) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -86,24 +86,26 @@ func init() { proto.RegisterType((*ModelVersions)(nil), "zigbeealliance.distributedcomplianceledger.model.ModelVersions") } -func init() { proto.RegisterFile("model/model_versions.proto", fileDescriptor_f2ffc766b6a650e7) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/model/model_versions.proto", fileDescriptor_eb2b229077d76c14) +} -var fileDescriptor_f2ffc766b6a650e7 = []byte{ - // 223 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xca, 0xcd, 0x4f, 0x49, - 0xcd, 0xd1, 0x07, 0x93, 0xf1, 0x65, 0xa9, 0x45, 0xc5, 0x99, 0xf9, 0x79, 0xc5, 0x7a, 0x05, 0x45, - 0xf9, 0x25, 0xf9, 0x42, 0x06, 0x55, 0x99, 0xe9, 0x49, 0xa9, 0xa9, 0x89, 0x39, 0x39, 0x99, 0x89, - 0x79, 0xc9, 0xa9, 0x7a, 0x29, 0x99, 0xc5, 0x25, 0x45, 0x99, 0x49, 0xa5, 0x25, 0xa9, 0x29, 
0xc9, - 0xf9, 0xb9, 0x05, 0x10, 0xd1, 0x9c, 0xd4, 0x94, 0xf4, 0xd4, 0x22, 0x3d, 0xb0, 0x01, 0x4a, 0xf1, - 0x5c, 0xbc, 0xbe, 0x20, 0x46, 0x18, 0xd4, 0x20, 0x21, 0x01, 0x2e, 0xe6, 0xb2, 0xcc, 0x14, 0x09, - 0x46, 0x05, 0x46, 0x0d, 0xd6, 0x20, 0x10, 0x13, 0x24, 0x52, 0x90, 0x99, 0x22, 0xc1, 0x04, 0x11, - 0x29, 0xc8, 0x4c, 0x11, 0xd2, 0xe2, 0x12, 0x28, 0xce, 0x4f, 0x2b, 0x29, 0x4f, 0x2c, 0x4a, 0x85, - 0xe9, 0x93, 0x60, 0x56, 0x60, 0xd6, 0xe0, 0x0d, 0xc2, 0x10, 0x77, 0x4a, 0x38, 0xf1, 0x48, 0x8e, - 0xf1, 0xc2, 0x23, 0x39, 0xc6, 0x07, 0x8f, 0xe4, 0x18, 0x27, 0x3c, 0x96, 0x63, 0xb8, 0xf0, 0x58, - 0x8e, 0xe1, 0xc6, 0x63, 0x39, 0x86, 0x28, 0xb7, 0xf4, 0xcc, 0x92, 0x8c, 0xd2, 0x24, 0xbd, 0xe4, - 0xfc, 0x5c, 0x7d, 0x88, 0xbb, 0x75, 0x61, 0x0e, 0xd7, 0x47, 0x72, 0xb8, 0x2e, 0xc2, 0xe5, 0xba, - 0x10, 0xa7, 0xeb, 0x57, 0x40, 0x7c, 0xaf, 0x5f, 0x52, 0x59, 0x90, 0x5a, 0x9c, 0xc4, 0x06, 0xf6, - 0xbb, 0x31, 0x20, 0x00, 0x00, 0xff, 0xff, 0x4c, 0xb5, 0x84, 0xae, 0x19, 0x01, 0x00, 0x00, +var fileDescriptor_eb2b229077d76c14 = []byte{ + // 224 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x72, 0xad, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0xe7, 0xe6, 0xa7, 0xa4, 0xe6, 0x40, 0xc8, 0xf8, 0xb2, 0xd4, 0xa2, 0xe2, 0xcc, + 0xfc, 0xbc, 0x62, 0xbd, 0x82, 0xa2, 0xfc, 0x92, 0x7c, 0x21, 0x03, 0x54, 0x63, 0xf4, 0xf0, 0x18, + 0xa3, 0x07, 0x36, 0x40, 0x29, 0x9e, 0x8b, 0xd7, 0x17, 0xc4, 0x08, 0x83, 0x1a, 0x24, 0x24, 0xc0, + 0xc5, 0x5c, 0x96, 0x99, 0x22, 0xc1, 0xa8, 0xc0, 0xa8, 0xc1, 0x1a, 0x04, 0x62, 0x82, 0x44, 0x0a, + 0x32, 0x53, 0x24, 0x98, 0x20, 0x22, 0x05, 0x99, 0x29, 0x42, 0x5a, 0x5c, 0x02, 0xc5, 0xf9, 0x69, + 0x25, 0xe5, 0x89, 0x45, 0xa9, 0x30, 0x7d, 0x12, 0xcc, 0x0a, 0xcc, 0x1a, 0xbc, 0x41, 0x18, 0xe2, + 0x4e, 0x09, 0x27, 0x1e, 0xc9, 0x31, 
0x5e, 0x78, 0x24, 0xc7, 0xf8, 0xe0, 0x91, 0x1c, 0xe3, 0x84, + 0xc7, 0x72, 0x0c, 0x17, 0x1e, 0xcb, 0x31, 0xdc, 0x78, 0x2c, 0xc7, 0x10, 0xe5, 0x96, 0x9e, 0x59, + 0x92, 0x51, 0x9a, 0xa4, 0x97, 0x9c, 0x9f, 0xab, 0x0f, 0x71, 0xb7, 0x2e, 0x36, 0xff, 0xeb, 0x22, + 0x5c, 0xae, 0x0b, 0x0d, 0x81, 0x0a, 0x68, 0x18, 0x94, 0x54, 0x16, 0xa4, 0x16, 0x27, 0xb1, 0x81, + 0xfd, 0x6e, 0x0c, 0x08, 0x00, 0x00, 0xff, 0xff, 0x24, 0x4a, 0xd5, 0x21, 0x44, 0x01, 0x00, 0x00, } func (m *ModelVersions) Marshal() (dAtA []byte, err error) { diff --git a/x/model/types/product.pb.go b/x/model/types/product.pb.go index 207556873..43400f58f 100644 --- a/x/model/types/product.pb.go +++ b/x/model/types/product.pb.go @@ -1,11 +1,11 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: model/product.proto +// source: zigbeealliance/distributedcomplianceledger/model/product.proto package types import ( fmt "fmt" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -32,7 +32,7 @@ func (m *Product) Reset() { *m = Product{} } func (m *Product) String() string { return proto.CompactTextString(m) } func (*Product) ProtoMessage() {} func (*Product) Descriptor() ([]byte, []int) { - return fileDescriptor_84a2bcf6440d083f, []int{0} + return fileDescriptor_a2362e9df72af44d, []int{0} } func (m *Product) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -86,24 +86,26 @@ func init() { proto.RegisterType((*Product)(nil), "zigbeealliance.distributedcomplianceledger.model.Product") } -func init() { proto.RegisterFile("model/product.proto", fileDescriptor_84a2bcf6440d083f) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/model/product.proto", fileDescriptor_a2362e9df72af44d) +} -var fileDescriptor_84a2bcf6440d083f = []byte{ - // 219 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x4c, 0xcf, 0xbd, 0x4a, 0xc5, 0x30, - 0x1c, 0x05, 0xf0, 0xc6, 
0xeb, 0x07, 0x66, 0x92, 0xb8, 0x74, 0x0a, 0x17, 0xa7, 0xbb, 0x34, 0x11, - 0x7c, 0x03, 0x07, 0x47, 0x95, 0x8e, 0x4e, 0xe6, 0xe3, 0x4f, 0x0d, 0x24, 0x4d, 0x48, 0x13, 0x50, - 0x9f, 0xc2, 0xc7, 0x72, 0xec, 0xe8, 0x28, 0xed, 0x8b, 0x88, 0x09, 0x62, 0xb7, 0xc3, 0x09, 0xf9, - 0xf3, 0x3b, 0xf8, 0xd2, 0x79, 0x0d, 0x96, 0x87, 0xe8, 0x75, 0x56, 0x89, 0x85, 0xe8, 0x93, 0x27, - 0xd7, 0xef, 0x66, 0x90, 0x00, 0xc2, 0x5a, 0x23, 0x46, 0x05, 0x4c, 0x9b, 0x29, 0x45, 0x23, 0x73, - 0x02, 0xad, 0xbc, 0x0b, 0xb5, 0xb5, 0xa0, 0x07, 0x88, 0xac, 0xfc, 0xbf, 0x7a, 0xc0, 0x67, 0x8f, - 0xf5, 0x04, 0xb9, 0xc0, 0xbb, 0x60, 0x74, 0x8b, 0xf6, 0xe8, 0x70, 0xd2, 0xff, 0x46, 0x42, 0xf0, - 0xf1, 0x28, 0x1c, 0xb4, 0x47, 0x7b, 0x74, 0x38, 0xef, 0x4b, 0x26, 0x14, 0xe3, 0x20, 0x62, 0xba, - 0xcf, 0x4e, 0x42, 0x6c, 0x77, 0xe5, 0x65, 0xd3, 0xdc, 0x3e, 0x7f, 0x2e, 0x14, 0xcd, 0x0b, 0x45, - 0xdf, 0x0b, 0x45, 0x1f, 0x2b, 0x6d, 0xe6, 0x95, 0x36, 0x5f, 0x2b, 0x6d, 0x9e, 0xee, 0x06, 0x93, - 0x5e, 0xb2, 0x64, 0xca, 0x3b, 0x5e, 0x9d, 0xdd, 0x1f, 0x94, 0x6f, 0xa0, 0xdd, 0xbf, 0xb4, 0xab, - 0x54, 0xfe, 0xca, 0xeb, 0xd8, 0xf4, 0x16, 0x60, 0x92, 0xa7, 0x65, 0xeb, 0xcd, 0x4f, 0x00, 0x00, - 0x00, 0xff, 0xff, 0x2c, 0x1b, 0x69, 0x2b, 0x02, 0x01, 0x00, 0x00, +var fileDescriptor_a2362e9df72af44d = []byte{ + // 220 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xb2, 0xab, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0xe7, 0xe6, 0xa7, 0xa4, 0xe6, 0xe8, 0x17, 0x14, 0xe5, 0xa7, 0x94, 0x26, 0x97, + 0xe8, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0x19, 0xa0, 0xea, 0xd7, 0xc3, 0xa3, 0x5f, 0x0f, 0xac, + 0x5f, 0xc9, 0x9f, 0x8b, 0x3d, 0x00, 0x62, 0x84, 0x90, 0x00, 0x17, 0x73, 0x41, 0x66, 0x8a, 0x04, + 0xa3, 0x02, 0xa3, 0x06, 0x6b, 0x10, 0x88, 0x29, 0x24, 0xc4, 0xc5, 0x92, 0x97, 0x98, 0x9b, 0x2a, 
+ 0xc1, 0xa4, 0xc0, 0xa8, 0xc1, 0x19, 0x04, 0x66, 0x0b, 0xc9, 0x71, 0x71, 0x15, 0x24, 0x16, 0x95, + 0xf8, 0x95, 0xe6, 0x26, 0xa5, 0x16, 0x49, 0x30, 0x83, 0x65, 0x90, 0x44, 0x9c, 0x12, 0x4e, 0x3c, + 0x92, 0x63, 0xbc, 0xf0, 0x48, 0x8e, 0xf1, 0xc1, 0x23, 0x39, 0xc6, 0x09, 0x8f, 0xe5, 0x18, 0x2e, + 0x3c, 0x96, 0x63, 0xb8, 0xf1, 0x58, 0x8e, 0x21, 0xca, 0x2d, 0x3d, 0xb3, 0x24, 0xa3, 0x34, 0x49, + 0x2f, 0x39, 0x3f, 0x57, 0x1f, 0xe2, 0x4e, 0x5d, 0x6c, 0x1e, 0xd5, 0x45, 0xb8, 0x54, 0x17, 0xea, + 0xd5, 0x0a, 0xa8, 0x67, 0x4b, 0x2a, 0x0b, 0x52, 0x8b, 0x93, 0xd8, 0xc0, 0x7e, 0x35, 0x06, 0x04, + 0x00, 0x00, 0xff, 0xff, 0x00, 0xaf, 0x53, 0x85, 0x2d, 0x01, 0x00, 0x00, } func (m *Product) Marshal() (dAtA []byte, err error) { diff --git a/x/model/types/query.pb.go b/x/model/types/query.pb.go index 9c3a47b27..ee0865707 100644 --- a/x/model/types/query.pb.go +++ b/x/model/types/query.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: model/query.proto +// source: zigbeealliance/distributedcomplianceledger/model/query.proto package types @@ -7,9 +7,9 @@ import ( context "context" fmt "fmt" query "github.com/cosmos/cosmos-sdk/types/query" - _ "github.com/gogo/protobuf/gogoproto" - grpc1 "github.com/gogo/protobuf/grpc" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + grpc1 "github.com/cosmos/gogoproto/grpc" + proto "github.com/cosmos/gogoproto/proto" _ "google.golang.org/genproto/googleapis/api/annotations" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" @@ -38,7 +38,7 @@ func (m *QueryGetVendorProductsRequest) Reset() { *m = QueryGetVendorPro func (m *QueryGetVendorProductsRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetVendorProductsRequest) ProtoMessage() {} func (*QueryGetVendorProductsRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_7ed1e5616dfbc517, []int{0} + return fileDescriptor_4b499a89cb430a11, []int{0} } func (m 
*QueryGetVendorProductsRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -82,7 +82,7 @@ func (m *QueryGetVendorProductsResponse) Reset() { *m = QueryGetVendorPr func (m *QueryGetVendorProductsResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetVendorProductsResponse) ProtoMessage() {} func (*QueryGetVendorProductsResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_7ed1e5616dfbc517, []int{1} + return fileDescriptor_4b499a89cb430a11, []int{1} } func (m *QueryGetVendorProductsResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -127,7 +127,7 @@ func (m *QueryGetModelRequest) Reset() { *m = QueryGetModelRequest{} } func (m *QueryGetModelRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetModelRequest) ProtoMessage() {} func (*QueryGetModelRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_7ed1e5616dfbc517, []int{2} + return fileDescriptor_4b499a89cb430a11, []int{2} } func (m *QueryGetModelRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -178,7 +178,7 @@ func (m *QueryGetModelResponse) Reset() { *m = QueryGetModelResponse{} } func (m *QueryGetModelResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetModelResponse) ProtoMessage() {} func (*QueryGetModelResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_7ed1e5616dfbc517, []int{3} + return fileDescriptor_4b499a89cb430a11, []int{3} } func (m *QueryGetModelResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -222,7 +222,7 @@ func (m *QueryAllModelRequest) Reset() { *m = QueryAllModelRequest{} } func (m *QueryAllModelRequest) String() string { return proto.CompactTextString(m) } func (*QueryAllModelRequest) ProtoMessage() {} func (*QueryAllModelRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_7ed1e5616dfbc517, []int{4} + return fileDescriptor_4b499a89cb430a11, []int{4} } func (m *QueryAllModelRequest) XXX_Unmarshal(b []byte) error { 
return m.Unmarshal(b) @@ -267,7 +267,7 @@ func (m *QueryAllModelResponse) Reset() { *m = QueryAllModelResponse{} } func (m *QueryAllModelResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllModelResponse) ProtoMessage() {} func (*QueryAllModelResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_7ed1e5616dfbc517, []int{5} + return fileDescriptor_4b499a89cb430a11, []int{5} } func (m *QueryAllModelResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -320,7 +320,7 @@ func (m *QueryGetModelVersionRequest) Reset() { *m = QueryGetModelVersio func (m *QueryGetModelVersionRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetModelVersionRequest) ProtoMessage() {} func (*QueryGetModelVersionRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_7ed1e5616dfbc517, []int{6} + return fileDescriptor_4b499a89cb430a11, []int{6} } func (m *QueryGetModelVersionRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -378,7 +378,7 @@ func (m *QueryGetModelVersionResponse) Reset() { *m = QueryGetModelVersi func (m *QueryGetModelVersionResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetModelVersionResponse) ProtoMessage() {} func (*QueryGetModelVersionResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_7ed1e5616dfbc517, []int{7} + return fileDescriptor_4b499a89cb430a11, []int{7} } func (m *QueryGetModelVersionResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -423,7 +423,7 @@ func (m *QueryGetModelVersionsRequest) Reset() { *m = QueryGetModelVersi func (m *QueryGetModelVersionsRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetModelVersionsRequest) ProtoMessage() {} func (*QueryGetModelVersionsRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_7ed1e5616dfbc517, []int{8} + return fileDescriptor_4b499a89cb430a11, []int{8} } func (m *QueryGetModelVersionsRequest) XXX_Unmarshal(b []byte) error { return 
m.Unmarshal(b) @@ -474,7 +474,7 @@ func (m *QueryGetModelVersionsResponse) Reset() { *m = QueryGetModelVers func (m *QueryGetModelVersionsResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetModelVersionsResponse) ProtoMessage() {} func (*QueryGetModelVersionsResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_7ed1e5616dfbc517, []int{9} + return fileDescriptor_4b499a89cb430a11, []int{9} } func (m *QueryGetModelVersionsResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -523,56 +523,58 @@ func init() { proto.RegisterType((*QueryGetModelVersionsResponse)(nil), "zigbeealliance.distributedcomplianceledger.model.QueryGetModelVersionsResponse") } -func init() { proto.RegisterFile("model/query.proto", fileDescriptor_7ed1e5616dfbc517) } - -var fileDescriptor_7ed1e5616dfbc517 = []byte{ - // 722 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x96, 0x41, 0x4f, 0xd4, 0x40, - 0x14, 0xc7, 0x77, 0xc0, 0x35, 0x64, 0x04, 0x94, 0x11, 0x23, 0x2c, 0xd0, 0xc5, 0x26, 0x0a, 0x31, - 0xa1, 0xe3, 0xc2, 0xc1, 0xe8, 0x41, 0x85, 0x03, 0x9c, 0x10, 0x5c, 0x13, 0x0e, 0x1e, 0xc4, 0xee, - 0x76, 0x2c, 0x13, 0xba, 0x9d, 0xd2, 0x99, 0xad, 0x22, 0xe1, 0xe2, 0xc9, 0x83, 0x07, 0x12, 0xbf, - 0x80, 0x5f, 0x42, 0xaf, 0x5e, 0x39, 0x92, 0xe8, 0x41, 0x2f, 0xc6, 0x80, 0x07, 0x3f, 0x86, 0xe9, - 0xcc, 0xd4, 0x6d, 0x0b, 0x48, 0x76, 0xd9, 0xcb, 0xa6, 0x79, 0xed, 0xfb, 0xbf, 0xdf, 0xff, 0xcd, - 0x7b, 0x03, 0x70, 0xa8, 0xc1, 0x1c, 0xe2, 0xe1, 0xad, 0x26, 0x09, 0xb7, 0xad, 0x20, 0x64, 0x82, - 0xa1, 0x3b, 0x6f, 0xa8, 0x5b, 0x23, 0xc4, 0xf6, 0x3c, 0x6a, 0xfb, 0x75, 0x62, 0x39, 0x94, 0x8b, - 0x90, 0xd6, 0x9a, 0x82, 0x38, 0x75, 0xd6, 0x08, 0x54, 0xd4, 0x23, 0x8e, 0x4b, 0x42, 0x4b, 0x66, - 0x97, 0xc6, 0x5d, 0xc6, 0x5c, 0x8f, 0x60, 0x3b, 0xa0, 0xd8, 0xf6, 0x7d, 0x26, 0x6c, 0x41, 0x99, - 0xcf, 0x95, 0x5e, 0xe9, 0x76, 0x9d, 0xf1, 0x06, 0xe3, 0xb8, 0x66, 0x73, 0xa2, 0x0a, 0xe1, 0xa8, - 0x52, 0x23, 0xc2, 0xae, 0xe0, 
0xc0, 0x76, 0xa9, 0x2f, 0x3f, 0xd6, 0xdf, 0x8e, 0x29, 0x9c, 0x88, - 0xf8, 0x0e, 0x0b, 0xd7, 0x83, 0x90, 0x39, 0xcd, 0xba, 0x48, 0x84, 0x34, 0xab, 0xfc, 0xd5, 0xa1, - 0xd1, 0x54, 0x68, 0x3d, 0x22, 0x21, 0x6f, 0x49, 0x95, 0x4e, 0x78, 0x95, 0x28, 0x0d, 0xbb, 0xcc, - 0x65, 0xf2, 0x11, 0xc7, 0x4f, 0x2a, 0x6a, 0x56, 0xe0, 0xc4, 0x93, 0x18, 0x6f, 0x89, 0x88, 0x35, - 0x09, 0xb0, 0xaa, 0xeb, 0x57, 0xc9, 0x56, 0x93, 0x70, 0x81, 0xae, 0xc0, 0xde, 0x88, 0x3a, 0x23, - 0x60, 0x12, 0x4c, 0x17, 0xab, 0xf1, 0xa3, 0xb9, 0x07, 0xa0, 0x71, 0x5a, 0x0e, 0x0f, 0x98, 0xcf, - 0x09, 0xf2, 0xe1, 0x60, 0x94, 0x79, 0x23, 0xf3, 0x2f, 0xcd, 0x3e, 0xb2, 0xda, 0xed, 0xb3, 0x95, - 0xad, 0xb0, 0x70, 0x61, 0xff, 0x67, 0xb9, 0x50, 0xcd, 0xa9, 0x9b, 0xf7, 0xe1, 0x70, 0x42, 0xb4, - 0x1c, 0x67, 0x9d, 0x0a, 0x1f, 0x47, 0x02, 0xea, 0x8c, 0xf4, 0xa8, 0x48, 0x40, 0x1d, 0xd3, 0x83, - 0xd7, 0x72, 0xb9, 0xda, 0xc4, 0x53, 0x58, 0x94, 0x08, 0x9a, 0xfd, 0x6e, 0xfb, 0xec, 0x52, 0x4f, - 0x23, 0x2b, 0x2d, 0xf3, 0xb9, 0x26, 0x9d, 0xf7, 0xbc, 0x0c, 0xe9, 0x22, 0x84, 0xad, 0xc1, 0xd0, - 0x15, 0x6f, 0x59, 0x6a, 0x8a, 0xac, 0x78, 0x8a, 0x2c, 0x35, 0xae, 0x7a, 0x8a, 0xac, 0x55, 0xdb, - 0x25, 0x3a, 0xb7, 0x9a, 0xca, 0x34, 0x3f, 0x01, 0x6d, 0xa7, 0x55, 0xe0, 0xb8, 0x9d, 0xde, 0x6e, - 0xd9, 0x41, 0x4b, 0x19, 0xec, 0x1e, 0x89, 0x3d, 0x75, 0x26, 0xb6, 0x22, 0xca, 0x70, 0x6f, 0xc2, - 0xb1, 0xcc, 0x29, 0xac, 0xa9, 0xe1, 0x6d, 0xe3, 0x20, 0xd1, 0x34, 0xbc, 0xcc, 0xd9, 0x4b, 0xf1, - 0xca, 0x0e, 0x89, 0xce, 0x1e, 0xe9, 0x9d, 0x04, 0xd3, 0x03, 0xd5, 0x7c, 0xd8, 0x7c, 0x07, 0xe0, - 0xf8, 0xc9, 0xd5, 0x74, 0xaf, 0x36, 0x60, 0x7f, 0x23, 0x15, 0xd7, 0xe7, 0xf1, 0xa0, 0xc3, 0x96, - 0x69, 0x15, 0xdd, 0xb9, 0x8c, 0xb2, 0xb9, 0x70, 0x32, 0x09, 0x6f, 0x67, 0x82, 0xdf, 0x83, 0xd6, - 0x12, 0xe7, 0x44, 0xb4, 0x9f, 0x4d, 0x38, 0x90, 0xae, 0x9a, 0xac, 0xe3, 0xc3, 0xf3, 0x19, 0x4a, - 0xb6, 0x31, 0xab, 0x3d, 0xfb, 0xb1, 0x0f, 0x16, 0x25, 0x0e, 0xfa, 0x06, 0xe0, 0x60, 0x76, 0x7f, - 0xd1, 0x4a, 0xfb, 0x25, 0xff, 0x7b, 0x3f, 0x95, 0x56, 0xbb, 0x27, 
0xa8, 0x9a, 0x65, 0x96, 0xdf, - 0x7e, 0xfd, 0xfd, 0xa1, 0x67, 0x14, 0x5d, 0xc7, 0x4e, 0x5d, 0xdf, 0xa5, 0xea, 0x97, 0xe3, 0x9d, - 0x88, 0x3a, 0xbb, 0xe8, 0x0b, 0x80, 0x45, 0xd9, 0x07, 0xb4, 0xd8, 0x79, 0xf1, 0xf4, 0xf6, 0x97, - 0x96, 0xce, 0xad, 0xa3, 0xd9, 0x6f, 0x4a, 0xf6, 0x32, 0x9a, 0x38, 0x85, 0x1d, 0xef, 0x04, 0xb1, - 0x83, 0xcf, 0x00, 0xf6, 0xc9, 0xc4, 0x79, 0xaf, 0x73, 0x13, 0xb9, 0x2b, 0xac, 0x63, 0x13, 0xf9, - 0x9b, 0xca, 0x1c, 0x95, 0x26, 0xae, 0xa2, 0xa1, 0x63, 0x26, 0xd0, 0x1f, 0x00, 0xfb, 0xd3, 0x23, - 0x88, 0x96, 0xcf, 0xd9, 0xb9, 0xec, 0x3d, 0x53, 0x7a, 0xdc, 0x2d, 0x39, 0x6d, 0xe5, 0x9e, 0xb4, - 0x32, 0x87, 0x2a, 0x29, 0x2b, 0xc9, 0xdf, 0xe5, 0xf4, 0x89, 0xe0, 0x9d, 0xdc, 0x1d, 0xb5, 0x8b, - 0x7e, 0x00, 0x38, 0x90, 0xd9, 0x36, 0xd4, 0x25, 0xb8, 0x7f, 0xab, 0xb3, 0xd2, 0x35, 0x3d, 0xed, - 0x76, 0x4a, 0xba, 0xbd, 0x81, 0xca, 0x67, 0xb8, 0x5d, 0x78, 0xb1, 0x7f, 0x68, 0x80, 0x83, 0x43, - 0x03, 0xfc, 0x3a, 0x34, 0xc0, 0xde, 0x91, 0x51, 0x38, 0x38, 0x32, 0x0a, 0xdf, 0x8f, 0x8c, 0xc2, - 0xb3, 0x45, 0x97, 0x8a, 0x8d, 0x66, 0xcd, 0xaa, 0xb3, 0x06, 0x56, 0x74, 0x33, 0x09, 0x1e, 0x4e, - 0xe1, 0xcd, 0xb4, 0xf8, 0x66, 0x14, 0x20, 0x7e, 0xad, 0x0b, 0x8a, 0xed, 0x80, 0xf0, 0xda, 0x45, - 0xf9, 0xef, 0xcd, 0xdc, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x5d, 0x86, 0x55, 0x63, 0xec, 0x09, - 0x00, 0x00, +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/model/query.proto", fileDescriptor_4b499a89cb430a11) +} + +var fileDescriptor_4b499a89cb430a11 = []byte{ + // 730 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0x41, 0x4f, 0x13, 0x41, + 0x14, 0xee, 0x80, 0x35, 0x64, 0x04, 0xd4, 0x11, 0x23, 0x54, 0xd8, 0xe2, 0x26, 0x0a, 0x31, 0x61, + 0xc7, 0xc2, 0xc1, 0x68, 0x8c, 0x0a, 0xd1, 0x72, 0x42, 0xb0, 0x26, 0x1c, 0x3c, 0x88, 0xdb, 0xee, + 0xb8, 0x6c, 0xd8, 0xee, 0x2c, 0x3b, 0xdb, 0x2a, 0x12, 0x2e, 0x9e, 0x3c, 0x78, 0x20, 0xf1, 0x0f, + 0xf8, 0x27, 0xf4, 0xea, 0x95, 0x23, 0x89, 0x1e, 0xf4, 
0x62, 0x0c, 0x78, 0xf0, 0x67, 0x98, 0x9d, + 0x99, 0xb5, 0x3b, 0x0b, 0x85, 0x6c, 0xdb, 0x0b, 0x99, 0x3c, 0xf6, 0xfb, 0xde, 0xf7, 0xbd, 0x79, + 0xef, 0x75, 0xe0, 0xbd, 0xb7, 0x8e, 0x5d, 0x25, 0xc4, 0x74, 0x5d, 0xc7, 0xf4, 0x6a, 0x04, 0x5b, + 0x0e, 0x0b, 0x03, 0xa7, 0xda, 0x08, 0x89, 0x55, 0xa3, 0x75, 0x5f, 0x44, 0x5d, 0x62, 0xd9, 0x24, + 0xc0, 0x75, 0x6a, 0x11, 0x17, 0x6f, 0x36, 0x48, 0xb0, 0x65, 0xf8, 0x01, 0x0d, 0x29, 0xba, 0xa5, + 0xa2, 0x8d, 0x13, 0xd0, 0x06, 0x47, 0x17, 0xc6, 0x6d, 0x4a, 0x6d, 0x97, 0x60, 0xd3, 0x77, 0xb0, + 0xe9, 0x79, 0x34, 0x34, 0x43, 0x87, 0x7a, 0x4c, 0xf0, 0x15, 0x6e, 0xd6, 0x28, 0xab, 0x53, 0x86, + 0xab, 0x26, 0x23, 0x22, 0x11, 0x6e, 0x96, 0xaa, 0x24, 0x34, 0x4b, 0xd8, 0x37, 0x6d, 0xc7, 0xe3, + 0x1f, 0xcb, 0x6f, 0xcb, 0x99, 0x95, 0x37, 0x89, 0x67, 0xd1, 0x60, 0xcd, 0x0f, 0xa8, 0xd5, 0xa8, + 0x85, 0x71, 0xce, 0xec, 0x15, 0xe0, 0x7f, 0x25, 0xfa, 0x51, 0x67, 0xe8, 0xb5, 0x26, 0x09, 0x58, + 0xcb, 0xcb, 0xe3, 0xee, 0x58, 0x62, 0x2b, 0x23, 0x36, 0xb5, 0x29, 0x3f, 0xe2, 0xe8, 0x24, 0xa2, + 0x7a, 0x09, 0x4e, 0x3c, 0x8d, 0x4a, 0xb9, 0x48, 0xc2, 0x55, 0x5e, 0x81, 0x15, 0x59, 0x80, 0x0a, + 0xd9, 0x6c, 0x10, 0x16, 0xa2, 0x0b, 0xb0, 0xbf, 0xe9, 0x58, 0xa3, 0x60, 0x12, 0x4c, 0xe7, 0x2b, + 0xd1, 0x51, 0xdf, 0x05, 0x50, 0x6b, 0x87, 0x61, 0x3e, 0xf5, 0x18, 0x41, 0x1e, 0x1c, 0x6e, 0x2a, + 0xff, 0xe1, 0xf8, 0x73, 0xb3, 0x0f, 0x8d, 0xac, 0x3d, 0x61, 0xa8, 0x19, 0x16, 0xce, 0xec, 0xfd, + 0x2a, 0xe6, 0x2a, 0x29, 0x76, 0xfd, 0x2e, 0x1c, 0x89, 0x15, 0x2d, 0x45, 0xa8, 0xb6, 0xe2, 0xa3, + 0x88, 0xef, 0x58, 0xa3, 0x7d, 0x22, 0xe2, 0x3b, 0x96, 0xee, 0xc2, 0xcb, 0x29, 0xac, 0x34, 0xf1, + 0x0c, 0xe6, 0xb9, 0x04, 0xa9, 0xfd, 0x76, 0x76, 0xed, 0x9c, 0x4f, 0x4a, 0x16, 0x5c, 0xfa, 0x0b, + 0xa9, 0x74, 0xde, 0x75, 0x15, 0xa5, 0x65, 0x08, 0x5b, 0x4d, 0x2c, 0x33, 0xde, 0x30, 0x44, 0xc7, + 0x1b, 0x51, 0xc7, 0x1b, 0x62, 0xb4, 0x64, 0xc7, 0x1b, 0x2b, 0xa6, 0x4d, 0x24, 0xb6, 0x92, 0x40, + 0xea, 0x9f, 0x81, 0xb4, 0xd3, 0x4a, 0x70, 0xd4, 0x4e, 0x7f, 0xaf, 0xec, 0xa0, 0x45, 0x45, 
0x76, + 0x1f, 0x97, 0x3d, 0x75, 0xaa, 0x6c, 0xa1, 0x48, 0xd1, 0xbd, 0x01, 0xaf, 0x2a, 0xb7, 0xb0, 0x2a, + 0x9a, 0x37, 0xc3, 0x45, 0xa2, 0x69, 0x78, 0x9e, 0xd1, 0x57, 0xe1, 0x6b, 0x33, 0x20, 0x12, 0x3d, + 0xda, 0x3f, 0x09, 0xa6, 0x87, 0x2a, 0xe9, 0xb0, 0xfe, 0x1e, 0xc0, 0xf1, 0xe3, 0xb3, 0xc9, 0x5a, + 0xad, 0xc3, 0xc1, 0x7a, 0x22, 0x2e, 0xef, 0xe3, 0x7e, 0x87, 0x25, 0x93, 0x2c, 0xb2, 0x72, 0x0a, + 0xb3, 0xbe, 0x70, 0xbc, 0x12, 0x96, 0xa5, 0x83, 0x3f, 0x80, 0xd6, 0x10, 0xa7, 0x48, 0xa4, 0x9f, + 0x0d, 0x38, 0x94, 0xcc, 0x1a, 0x8f, 0xe3, 0x83, 0xee, 0x0c, 0xc5, 0xd3, 0xa8, 0x72, 0xcf, 0x7e, + 0x1a, 0x80, 0x79, 0x2e, 0x07, 0x7d, 0x07, 0x70, 0x58, 0x9d, 0x5f, 0xb4, 0x9c, 0x3d, 0xe5, 0x89, + 0xfb, 0xa9, 0xb0, 0xd2, 0x3b, 0x42, 0x51, 0x2c, 0xbd, 0xf8, 0xee, 0xdb, 0x9f, 0x8f, 0x7d, 0x63, + 0xe8, 0x0a, 0xb6, 0x6a, 0x6e, 0x72, 0xa3, 0x32, 0xbc, 0xdd, 0x74, 0xac, 0x1d, 0xf4, 0x15, 0xc0, + 0x3c, 0xaf, 0x03, 0x2a, 0x77, 0x9e, 0x3c, 0x39, 0xfd, 0x85, 0xc5, 0xae, 0x79, 0xa4, 0xf6, 0xeb, + 0x5c, 0x7b, 0x11, 0x4d, 0xb4, 0xd1, 0x8e, 0xb7, 0xfd, 0xc8, 0xc1, 0x17, 0x00, 0x07, 0x38, 0x70, + 0xde, 0xed, 0xdc, 0x44, 0x6a, 0x85, 0x75, 0x6c, 0x22, 0xbd, 0xa9, 0xf4, 0x31, 0x6e, 0xe2, 0x12, + 0xba, 0x78, 0xc4, 0x04, 0xfa, 0x0b, 0xe0, 0x60, 0xb2, 0x05, 0xd1, 0x52, 0x97, 0x95, 0x53, 0xf7, + 0x4c, 0xe1, 0x49, 0xaf, 0xe8, 0xa4, 0x95, 0x3b, 0xdc, 0xca, 0x1c, 0x2a, 0x25, 0xac, 0xc4, 0xbf, + 0xcb, 0xc9, 0x1b, 0xc1, 0xdb, 0xa9, 0x1d, 0xb5, 0x83, 0x7e, 0x02, 0x38, 0xa4, 0x4c, 0x1b, 0xea, + 0x91, 0xb8, 0xff, 0xa3, 0xb3, 0xdc, 0x33, 0x3e, 0xe9, 0x76, 0x8a, 0xbb, 0xbd, 0x86, 0x8a, 0xa7, + 0xb8, 0x5d, 0x78, 0xb9, 0x77, 0xa0, 0x81, 0xfd, 0x03, 0x0d, 0xfc, 0x3e, 0xd0, 0xc0, 0xee, 0xa1, + 0x96, 0xdb, 0x3f, 0xd4, 0x72, 0x3f, 0x0e, 0xb5, 0xdc, 0xf3, 0xb2, 0xed, 0x84, 0xeb, 0x8d, 0xaa, + 0x51, 0xa3, 0x75, 0x2c, 0xd4, 0xcd, 0x1c, 0xf7, 0xf0, 0x99, 0x69, 0xe9, 0x9b, 0x91, 0x4f, 0x9f, + 0x37, 0x32, 0x61, 0xb8, 0xe5, 0x13, 0x56, 0x3d, 0xcb, 0x9f, 0x37, 0x73, 0xff, 0x02, 0x00, 0x00, + 0xff, 0xff, 0xb5, 0xf3, 0x3c, 
0xb5, 0xc3, 0x0a, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -806,7 +808,7 @@ var _Query_serviceDesc = grpc.ServiceDesc{ }, }, Streams: []grpc.StreamDesc{}, - Metadata: "model/query.proto", + Metadata: "zigbeealliance/distributedcomplianceledger/model/query.proto", } func (m *QueryGetVendorProductsRequest) Marshal() (dAtA []byte, err error) { diff --git a/x/model/types/query.pb.gw.go b/x/model/types/query.pb.gw.go index 337a48713..cd7d829a8 100644 --- a/x/model/types/query.pb.gw.go +++ b/x/model/types/query.pb.gw.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. -// source: model/query.proto +// source: zigbeealliance/distributedcomplianceledger/model/query.proto /* Package types is a reverse proxy. @@ -639,15 +639,15 @@ func RegisterQueryHandlerClient(ctx context.Context, mux *runtime.ServeMux, clie } var ( - pattern_Query_VendorProducts_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "model", "models", "vid"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_VendorProducts_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "model", "models", "vid"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_Model_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"dcl", "model", "models", "vid", "pid"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_Model_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"dcl", "model", "models", "vid", "pid"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_ModelAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "model", "models"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_ModelAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 
2}, []string{"dcl", "model", "models"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_ModelVersion_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"dcl", "model", "versions", "vid", "pid", "softwareVersion"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_ModelVersion_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"dcl", "model", "versions", "vid", "pid", "softwareVersion"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_ModelVersions_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"dcl", "model", "versions", "vid", "pid"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_ModelVersions_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"dcl", "model", "versions", "vid", "pid"}, "", runtime.AssumeColonVerbOpt(false))) ) var ( diff --git a/x/model/types/tx.pb.go b/x/model/types/tx.pb.go index 188cdbabd..825fd3c01 100644 --- a/x/model/types/tx.pb.go +++ b/x/model/types/tx.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: model/tx.proto +// source: zigbeealliance/distributedcomplianceledger/model/tx.proto package types @@ -7,9 +7,9 @@ import ( context "context" fmt "fmt" _ "github.com/cosmos/cosmos-proto" - _ "github.com/gogo/protobuf/gogoproto" - grpc1 "github.com/gogo/protobuf/grpc" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + grpc1 "github.com/cosmos/gogoproto/grpc" + proto "github.com/cosmos/gogoproto/proto" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" status "google.golang.org/grpc/status" @@ -55,7 +55,7 @@ func (m *MsgCreateModel) Reset() { *m = MsgCreateModel{} } func (m *MsgCreateModel) String() string { return proto.CompactTextString(m) } func (*MsgCreateModel) ProtoMessage() {} func (*MsgCreateModel) Descriptor() ([]byte, []int) { - return fileDescriptor_8898c625fa4755dc, []int{0} + return fileDescriptor_bc57f11ad3062327, []int{0} } func (m *MsgCreateModel) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -224,7 +224,7 @@ func (m *MsgCreateModelResponse) Reset() { *m = MsgCreateModelResponse{} func (m *MsgCreateModelResponse) String() string { return proto.CompactTextString(m) } func (*MsgCreateModelResponse) ProtoMessage() {} func (*MsgCreateModelResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_8898c625fa4755dc, []int{1} + return fileDescriptor_bc57f11ad3062327, []int{1} } func (m *MsgCreateModelResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -277,7 +277,7 @@ func (m *MsgUpdateModel) Reset() { *m = MsgUpdateModel{} } func (m *MsgUpdateModel) String() string { return proto.CompactTextString(m) } func (*MsgUpdateModel) ProtoMessage() {} func (*MsgUpdateModel) Descriptor() ([]byte, []int) { - return fileDescriptor_8898c625fa4755dc, []int{2} + return fileDescriptor_bc57f11ad3062327, []int{2} } func (m *MsgUpdateModel) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -432,7 +432,7 @@ func (m *MsgUpdateModelResponse) Reset() { *m = 
MsgUpdateModelResponse{} func (m *MsgUpdateModelResponse) String() string { return proto.CompactTextString(m) } func (*MsgUpdateModelResponse) ProtoMessage() {} func (*MsgUpdateModelResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_8898c625fa4755dc, []int{3} + return fileDescriptor_bc57f11ad3062327, []int{3} } func (m *MsgUpdateModelResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -471,7 +471,7 @@ func (m *MsgDeleteModel) Reset() { *m = MsgDeleteModel{} } func (m *MsgDeleteModel) String() string { return proto.CompactTextString(m) } func (*MsgDeleteModel) ProtoMessage() {} func (*MsgDeleteModel) Descriptor() ([]byte, []int) { - return fileDescriptor_8898c625fa4755dc, []int{4} + return fileDescriptor_bc57f11ad3062327, []int{4} } func (m *MsgDeleteModel) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -528,7 +528,7 @@ func (m *MsgDeleteModelResponse) Reset() { *m = MsgDeleteModelResponse{} func (m *MsgDeleteModelResponse) String() string { return proto.CompactTextString(m) } func (*MsgDeleteModelResponse) ProtoMessage() {} func (*MsgDeleteModelResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_8898c625fa4755dc, []int{5} + return fileDescriptor_bc57f11ad3062327, []int{5} } func (m *MsgDeleteModelResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -580,7 +580,7 @@ func (m *MsgCreateModelVersion) Reset() { *m = MsgCreateModelVersion{} } func (m *MsgCreateModelVersion) String() string { return proto.CompactTextString(m) } func (*MsgCreateModelVersion) ProtoMessage() {} func (*MsgCreateModelVersion) Descriptor() ([]byte, []int) { - return fileDescriptor_8898c625fa4755dc, []int{6} + return fileDescriptor_bc57f11ad3062327, []int{6} } func (m *MsgCreateModelVersion) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -728,7 +728,7 @@ func (m *MsgCreateModelVersionResponse) Reset() { *m = MsgCreateModelVer func (m *MsgCreateModelVersionResponse) String() string { return proto.CompactTextString(m) } 
func (*MsgCreateModelVersionResponse) ProtoMessage() {} func (*MsgCreateModelVersionResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_8898c625fa4755dc, []int{7} + return fileDescriptor_bc57f11ad3062327, []int{7} } func (m *MsgCreateModelVersionResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -776,7 +776,7 @@ func (m *MsgUpdateModelVersion) Reset() { *m = MsgUpdateModelVersion{} } func (m *MsgUpdateModelVersion) String() string { return proto.CompactTextString(m) } func (*MsgUpdateModelVersion) ProtoMessage() {} func (*MsgUpdateModelVersion) Descriptor() ([]byte, []int) { - return fileDescriptor_8898c625fa4755dc, []int{8} + return fileDescriptor_bc57f11ad3062327, []int{8} } func (m *MsgUpdateModelVersion) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -896,7 +896,7 @@ func (m *MsgUpdateModelVersionResponse) Reset() { *m = MsgUpdateModelVer func (m *MsgUpdateModelVersionResponse) String() string { return proto.CompactTextString(m) } func (*MsgUpdateModelVersionResponse) ProtoMessage() {} func (*MsgUpdateModelVersionResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_8898c625fa4755dc, []int{9} + return fileDescriptor_bc57f11ad3062327, []int{9} } func (m *MsgUpdateModelVersionResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -936,7 +936,7 @@ func (m *MsgDeleteModelVersion) Reset() { *m = MsgDeleteModelVersion{} } func (m *MsgDeleteModelVersion) String() string { return proto.CompactTextString(m) } func (*MsgDeleteModelVersion) ProtoMessage() {} func (*MsgDeleteModelVersion) Descriptor() ([]byte, []int) { - return fileDescriptor_8898c625fa4755dc, []int{10} + return fileDescriptor_bc57f11ad3062327, []int{10} } func (m *MsgDeleteModelVersion) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1000,7 +1000,7 @@ func (m *MsgDeleteModelVersionResponse) Reset() { *m = MsgDeleteModelVer func (m *MsgDeleteModelVersionResponse) String() string { return proto.CompactTextString(m) } func 
(*MsgDeleteModelVersionResponse) ProtoMessage() {} func (*MsgDeleteModelVersionResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_8898c625fa4755dc, []int{11} + return fileDescriptor_bc57f11ad3062327, []int{11} } func (m *MsgDeleteModelVersionResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1044,97 +1044,99 @@ func init() { proto.RegisterType((*MsgDeleteModelVersionResponse)(nil), "zigbeealliance.distributedcomplianceledger.model.MsgDeleteModelVersionResponse") } -func init() { proto.RegisterFile("model/tx.proto", fileDescriptor_8898c625fa4755dc) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/model/tx.proto", fileDescriptor_bc57f11ad3062327) +} -var fileDescriptor_8898c625fa4755dc = []byte{ - // 1383 bytes of a gzipped FileDescriptorProto +var fileDescriptor_bc57f11ad3062327 = []byte{ + // 1388 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xdc, 0x59, 0xcd, 0x6f, 0xdb, 0xb6, - 0x1b, 0xae, 0x7e, 0xcd, 0x27, 0x9d, 0x8f, 0x86, 0xfd, 0xd2, 0xcf, 0xed, 0xac, 0x4c, 0x05, 0x86, - 0xa0, 0x48, 0xe2, 0xd4, 0x69, 0xb2, 0xa2, 0x58, 0x80, 0xd6, 0xe9, 0xda, 0x04, 0x68, 0xda, 0x42, + 0x1b, 0xae, 0x7e, 0xcd, 0x27, 0x9d, 0x8f, 0x96, 0xfd, 0xd2, 0xcf, 0xed, 0xac, 0x4c, 0x05, 0x86, + 0xa0, 0x48, 0xe2, 0xd4, 0x69, 0xb2, 0xae, 0x68, 0x80, 0xd6, 0xe9, 0xda, 0x04, 0x68, 0xda, 0x42, 0x69, 0x8b, 0xa1, 0xc0, 0xda, 0xc9, 0x12, 0xed, 0x10, 0x93, 0x44, 0x8d, 0xa4, 0xf3, 0x75, 0xde, 0x71, 0x87, 0x5d, 0x77, 0xd9, 0x9f, 0xb0, 0xd3, 0xce, 0xdb, 0x6d, 0xd8, 0xb1, 0xd8, 0x69, 0x27, 0x61, 0x68, 0x81, 0x9d, 0x76, 0x18, 0xf4, 0x17, 0x0c, 0xa2, 0xe5, 0x58, 0xb2, 0xe5, 0x8f, 0x58, - 0xf6, 0x21, 0xbd, 0x18, 0x16, 0xc9, 0xe7, 0xe1, 0xfb, 0x92, 0xef, 0xfb, 0xbc, 0xa4, 0x04, 0x66, - 0x6c, 0x62, 0x22, 0x2b, 0xcf, 0x0f, 0x97, 0x5d, 0x4a, 0x38, 0x81, 0x2b, 0xc7, 0xb8, 0x52, 0x42, - 0x48, 0xb7, 0x2c, 0xac, 0x3b, 0x06, 0x5a, 0x36, 0x31, 0xe3, 0x14, 0x97, 0xaa, 0x1c, 0x99, 0x06, - 0xb1, 0xdd, 
0x5a, 0xab, 0x85, 0xcc, 0x0a, 0xa2, 0xcb, 0x02, 0x9a, 0x9d, 0xab, 0x31, 0x88, 0xdf, - 0x1a, 0x49, 0xf6, 0xff, 0x91, 0xa6, 0x37, 0xfb, 0x88, 0x32, 0x4c, 0x9c, 0xb0, 0xeb, 0x52, 0x85, - 0x54, 0x88, 0xf8, 0x9b, 0x0f, 0xfe, 0xd5, 0x01, 0x06, 0x61, 0x36, 0x61, 0x6f, 0x6a, 0x1d, 0xb5, - 0x87, 0x5a, 0x97, 0xfa, 0xf7, 0x14, 0x98, 0xd9, 0x61, 0x95, 0x4d, 0x8a, 0x74, 0x8e, 0x76, 0x02, - 0x46, 0xb8, 0x0d, 0xc6, 0x8d, 0xe0, 0x91, 0x50, 0x59, 0x9a, 0x97, 0x16, 0x26, 0x8b, 0x79, 0xdf, - 0x53, 0x2e, 0xee, 0xeb, 0x16, 0x36, 0x75, 0x8e, 0xee, 0xaa, 0x14, 0x7d, 0x53, 0xc5, 0x14, 0x99, - 0xea, 0x1f, 0x3f, 0x2f, 0x5d, 0x0a, 0xc9, 0xee, 0x9b, 0x26, 0x45, 0x8c, 0xed, 0x72, 0x8a, 0x9d, - 0x8a, 0x56, 0xc7, 0xc3, 0x15, 0x70, 0x7e, 0x1f, 0x9b, 0xf2, 0xff, 0xe6, 0xa5, 0x85, 0xd1, 0x62, - 0xce, 0xf7, 0x94, 0x6c, 0x83, 0xa6, 0xc2, 0xd1, 0xc6, 0xad, 0x45, 0x8b, 0xa3, 0x8d, 0xf5, 0xb5, - 0xb5, 0xd5, 0x35, 0x55, 0x0b, 0x86, 0x06, 0x08, 0x17, 0x9b, 0xf2, 0xf9, 0xde, 0x10, 0x2e, 0x36, - 0x61, 0x11, 0x4c, 0x99, 0x68, 0x1f, 0x1b, 0xe8, 0xf9, 0x91, 0x8b, 0xb6, 0x4d, 0x79, 0xa4, 0x1d, - 0x74, 0x25, 0x0a, 0x8d, 0x61, 0xe0, 0x7d, 0x90, 0x71, 0x29, 0x31, 0xab, 0x06, 0x7f, 0xa2, 0xdb, - 0x48, 0x1e, 0x15, 0x6e, 0x2b, 0xbe, 0xa7, 0x5c, 0x6b, 0x75, 0x7b, 0xd1, 0xd6, 0x0f, 0x37, 0x6e, - 0x15, 0xee, 0xa8, 0x5a, 0x14, 0x03, 0xef, 0x82, 0xa9, 0xf0, 0xf1, 0xb1, 0x5e, 0x42, 0x96, 0x3c, - 0x26, 0x38, 0xae, 0xf8, 0x9e, 0x02, 0x1b, 0x1c, 0x01, 0xb4, 0xb0, 0xb6, 0xae, 0x6a, 0xb1, 0xb1, - 0x70, 0x0d, 0x00, 0x57, 0xa7, 0xfc, 0x49, 0xd5, 0x2e, 0x21, 0x2a, 0x8f, 0x0b, 0xe4, 0x65, 0xdf, - 0x53, 0xe6, 0xe2, 0xc8, 0xd5, 0x82, 0xaa, 0x45, 0x06, 0xc2, 0xe7, 0xe0, 0xaa, 0x41, 0x6c, 0x1b, - 0xb3, 0x20, 0x00, 0xb0, 0x53, 0xd9, 0xac, 0x32, 0x4e, 0xec, 0x87, 0x16, 0x39, 0x90, 0x27, 0xc4, - 0x22, 0x64, 0x7d, 0x4f, 0xb9, 0x92, 0xb4, 0x08, 0x05, 0x55, 0x6b, 0x07, 0x85, 0x3f, 0x4a, 0x20, - 0xdb, 0xa6, 0xef, 0x05, 0xb5, 0xe4, 0x49, 0x61, 0xdd, 0x6b, 0xdf, 0x53, 0x5e, 0xb5, 0xae, 0xcd, - 0x1b, 0x5c, 0xde, 0xd8, 0x4c, 0xc6, 0xce, 0x17, 
0x16, 0x89, 0x8d, 0x39, 0xb2, 0x5d, 0x7e, 0xb4, - 0x58, 0xa5, 0xd6, 0x22, 0xe3, 0x3a, 0xe5, 0xcc, 0x21, 0xfc, 0x00, 0xf3, 0xbd, 0x8d, 0x3d, 0xce, - 0xdd, 0xbb, 0x8b, 0x27, 0xeb, 0xd3, 0xc1, 0x02, 0xf8, 0x18, 0x7c, 0x1c, 0xeb, 0x0d, 0xa2, 0x76, - 0xdb, 0xc1, 0x1c, 0xeb, 0xd6, 0x2e, 0x47, 0x2e, 0xdb, 0xc2, 0x0e, 0x97, 0xc1, 0xbc, 0xb4, 0x30, - 0xad, 0x75, 0x1f, 0x08, 0x19, 0x58, 0xe8, 0x38, 0x68, 0xdb, 0x61, 0x9c, 0x56, 0x0d, 0x8e, 0x89, - 0x23, 0x67, 0x84, 0xef, 0x57, 0xe3, 0xe9, 0x20, 0xc2, 0x61, 0xa5, 0x70, 0x5b, 0xd5, 0x7a, 0x26, - 0x82, 0xcf, 0xc0, 0x8d, 0x96, 0xb1, 0xbb, 0xc8, 0x20, 0x8e, 0xa9, 0xd3, 0xa3, 0x86, 0x13, 0x53, - 0xc2, 0x89, 0x5e, 0x86, 0xc2, 0x03, 0x70, 0xb3, 0xcb, 0xb0, 0xa8, 0x23, 0xd3, 0x9d, 0x1d, 0x39, - 0x05, 0x15, 0x7c, 0x0d, 0xa6, 0xab, 0x0c, 0xd1, 0x1d, 0xdd, 0xa9, 0xea, 0x56, 0x10, 0x20, 0x33, - 0x82, 0xfb, 0x8e, 0xef, 0x29, 0xb7, 0x1b, 0xdc, 0xa7, 0xd8, 0xfa, 0x38, 0x1d, 0xfc, 0x02, 0x00, - 0x56, 0x75, 0x5d, 0x42, 0x79, 0x40, 0x3e, 0x9b, 0x92, 0x3c, 0xc2, 0x15, 0x30, 0x87, 0x59, 0x18, - 0x30, 0x5f, 0x48, 0xcb, 0xdc, 0xe0, 0x82, 0xcf, 0xc0, 0x98, 0xc5, 0xca, 0x01, 0xeb, 0x5c, 0x4a, - 0xd6, 0x90, 0x07, 0x3e, 0x00, 0xd3, 0xcc, 0xd8, 0x43, 0xb6, 0xfe, 0xb2, 0x26, 0xf7, 0x32, 0x0c, - 0x42, 0xa3, 0xab, 0xca, 0xc5, 0x41, 0xf0, 0x18, 0x5c, 0x6b, 0xec, 0x2c, 0xa2, 0x1a, 0xb2, 0x09, - 0x47, 0x2f, 0x70, 0x3d, 0xb5, 0x2f, 0xa6, 0x34, 0xb6, 0x13, 0xb9, 0x2a, 0x83, 0x2b, 0xf1, 0x3a, - 0xa3, 0x21, 0xe6, 0x12, 0x87, 0x21, 0xf5, 0xbb, 0x8c, 0x28, 0x41, 0x2f, 0x5c, 0xf3, 0xac, 0x96, - 0xa0, 0x3b, 0xf1, 0xf2, 0x31, 0xd2, 0x4e, 0xfa, 0xbb, 0x57, 0x8d, 0xd1, 0xbe, 0xab, 0xc6, 0x58, - 0xaf, 0x55, 0xe3, 0xb0, 0xa3, 0xbc, 0x8f, 0xa7, 0x8c, 0x81, 0x4e, 0xc2, 0x7d, 0x1a, 0xa9, 0x9d, - 0x18, 0x94, 0xd4, 0x9e, 0x4e, 0x18, 0x27, 0x87, 0x28, 0x8c, 0x60, 0x98, 0xc2, 0x98, 0x19, 0x9a, - 0x30, 0x4e, 0x0d, 0x45, 0x18, 0xa7, 0x07, 0x24, 0x8c, 0xf7, 0x40, 0xc6, 0x62, 0x65, 0x0d, 0xed, - 0x63, 0x21, 0x8b, 0x33, 0x3d, 0x1d, 0xfe, 0xa2, 0x90, 0x56, 0x69, 0x9d, 0x1d, 0x82, 
0xb4, 0x5e, - 0x18, 0xa2, 0xb4, 0xf6, 0x76, 0x20, 0x9a, 0xeb, 0xf1, 0x40, 0x14, 0x0a, 0x75, 0x44, 0x8d, 0x4f, - 0x84, 0xfa, 0x57, 0x49, 0x08, 0xf5, 0x03, 0x64, 0xa1, 0xb3, 0x29, 0xd4, 0xa1, 0x6f, 0x11, 0x07, - 0x4e, 0x7c, 0xfb, 0x65, 0x12, 0x5c, 0x8e, 0xd7, 0xa7, 0xfa, 0xce, 0x9e, 0xb1, 0x5a, 0xb4, 0x00, - 0x66, 0x19, 0x29, 0xf3, 0x03, 0x9d, 0xa2, 0x7a, 0x40, 0x8f, 0x88, 0xad, 0x6f, 0x6e, 0x86, 0xcf, - 0xc1, 0xe5, 0xa6, 0xa6, 0x9a, 0xbd, 0x61, 0x11, 0x6a, 0x9a, 0x2d, 0x76, 0xfd, 0x59, 0xbf, 0xad, - 0x6a, 0xc9, 0x60, 0xb8, 0x05, 0x66, 0x0d, 0x33, 0x6c, 0x8a, 0x94, 0xa6, 0xee, 0x49, 0xd9, 0x0c, - 0x83, 0x5b, 0xe0, 0x62, 0x19, 0x53, 0x3b, 0x98, 0x62, 0xdb, 0x29, 0x13, 0x6a, 0xeb, 0x42, 0xa2, - 0xc7, 0xdb, 0x95, 0xc8, 0xb5, 0x5b, 0x05, 0x55, 0x4b, 0x82, 0xc0, 0x02, 0xb8, 0xd4, 0x64, 0xec, - 0xcb, 0x00, 0x29, 0x8a, 0xcc, 0x84, 0x96, 0xd8, 0x17, 0x48, 0x15, 0xe1, 0x7a, 0xe3, 0xc6, 0x93, - 0x42, 0xaa, 0x6a, 0x3c, 0xf0, 0x73, 0x90, 0x21, 0x5c, 0x7f, 0x88, 0x2d, 0xb4, 0x8b, 0x8f, 0x91, - 0x28, 0x07, 0x23, 0xc5, 0x1b, 0xbe, 0xa7, 0x28, 0x09, 0x17, 0x29, 0x41, 0xf4, 0x54, 0x00, 0x55, - 0x2d, 0x8a, 0x83, 0x8f, 0x05, 0xcd, 0xe6, 0x1e, 0x32, 0xbe, 0x66, 0x55, 0x3b, 0x14, 0xfe, 0x9b, - 0xbe, 0xa7, 0x7c, 0xd2, 0x99, 0xe6, 0x64, 0xe3, 0xa2, 0x70, 0xf8, 0x25, 0x98, 0x8d, 0x3c, 0x06, - 0xd7, 0x61, 0x21, 0xf8, 0xa3, 0xc5, 0x55, 0xdf, 0x53, 0xf2, 0x5d, 0x18, 0x5b, 0xf7, 0xb0, 0x89, - 0x0b, 0x16, 0xc1, 0x75, 0x1b, 0x3b, 0xf7, 0x5d, 0xd7, 0xc2, 0x86, 0x5e, 0xb2, 0xd0, 0x6e, 0x53, - 0x68, 0x4e, 0x8b, 0xd0, 0xec, 0x38, 0x06, 0x1e, 0x83, 0xeb, 0xb6, 0x7e, 0xd8, 0x9e, 0x63, 0x46, - 0xe8, 0xf5, 0xba, 0xef, 0x29, 0x85, 0x58, 0x78, 0x19, 0xac, 0x8c, 0x91, 0x65, 0x6e, 0xec, 0x74, - 0x20, 0x57, 0xb5, 0x8e, 0xdc, 0xb0, 0x04, 0x66, 0x29, 0xb2, 0x90, 0xce, 0xd0, 0x13, 0xc2, 0x11, - 0x1b, 0xc4, 0x15, 0xa4, 0x99, 0xb0, 0xb5, 0x00, 0x5d, 0xe8, 0xa3, 0x00, 0xa9, 0x0a, 0xf8, 0x28, - 0x51, 0xbf, 0x4e, 0x14, 0xee, 0xb7, 0x31, 0xa1, 0x70, 0x11, 0x61, 0xff, 0xe0, 0x15, 0xae, 0x5d, - 0xde, 0x8f, 0xf6, 0x94, 
0xf7, 0x63, 0x03, 0xca, 0xfb, 0x6e, 0x39, 0x30, 0xde, 0x43, 0x0e, 0x7c, - 0x2b, 0x75, 0x49, 0x82, 0x09, 0x11, 0x33, 0xf7, 0x7c, 0x4f, 0xf9, 0x2c, 0xc9, 0xd8, 0xe1, 0xa5, - 0xc3, 0xe4, 0xa0, 0xd3, 0x61, 0x3e, 0x41, 0x26, 0xe3, 0x0a, 0xf8, 0x69, 0x92, 0x02, 0x26, 0xdc, - 0x7c, 0x5a, 0xc4, 0xae, 0x25, 0xd3, 0xa6, 0xfa, 0xcf, 0xb4, 0xd6, 0x3c, 0x3a, 0xc9, 0xb4, 0x7f, - 0x25, 0x91, 0x69, 0x91, 0x63, 0xc6, 0x87, 0x9e, 0x69, 0xe1, 0x9a, 0xb4, 0x7a, 0x5c, 0x5f, 0x93, - 0xc2, 0x3f, 0xe3, 0xe0, 0xfc, 0x0e, 0xab, 0xc0, 0x1f, 0x24, 0x90, 0x89, 0xbe, 0x6c, 0xbe, 0xb7, - 0x7c, 0xda, 0x37, 0xe2, 0xcb, 0x71, 0x99, 0xcb, 0x6e, 0xa5, 0x65, 0xa8, 0xdb, 0x28, 0x6c, 0x8b, - 0xbe, 0x85, 0xe8, 0xcf, 0xb6, 0x08, 0x43, 0x9f, 0xb6, 0x25, 0x9c, 0xbd, 0x85, 0x6d, 0xd1, 0x83, - 0x77, 0x7f, 0xb6, 0x45, 0x18, 0xfa, 0xb4, 0x2d, 0xe1, 0xec, 0x0c, 0x7f, 0x92, 0x00, 0x4c, 0x38, - 0x38, 0x3f, 0x4a, 0xbb, 0x31, 0x21, 0x51, 0xf6, 0xe9, 0x80, 0x88, 0x62, 0x06, 0x27, 0xd4, 0xc1, - 0x47, 0x69, 0x77, 0x2b, 0x9d, 0xc1, 0xed, 0x15, 0x45, 0x18, 0x9c, 0x20, 0x27, 0x8f, 0xd2, 0x6e, - 0x61, 0x3a, 0x83, 0xdb, 0xa7, 0x7b, 0xf1, 0xab, 0xdf, 0xdf, 0xe5, 0xa4, 0xb7, 0xef, 0x72, 0xd2, - 0x5f, 0xef, 0x72, 0xd2, 0xf7, 0xef, 0x73, 0xe7, 0xde, 0xbe, 0xcf, 0x9d, 0xfb, 0xf3, 0x7d, 0xee, - 0xdc, 0xab, 0x87, 0x15, 0xcc, 0xf7, 0xaa, 0xa5, 0x65, 0x83, 0xd8, 0xf9, 0xda, 0xa4, 0x4b, 0xf5, - 0x59, 0xf3, 0x91, 0x59, 0x97, 0x1a, 0xd3, 0x2e, 0xd5, 0xe6, 0xcd, 0x1f, 0xe6, 0xc3, 0x8f, 0x69, - 0x47, 0x2e, 0x62, 0xa5, 0x31, 0xf1, 0xfd, 0x6a, 0xf5, 0xbf, 0x00, 0x00, 0x00, 0xff, 0xff, 0xbc, - 0xb8, 0x71, 0x54, 0x62, 0x1b, 0x00, 0x00, + 0xf6, 0x21, 0xbd, 0x18, 0x16, 0xc9, 0xe7, 0xe1, 0xfb, 0x92, 0xef, 0xfb, 0xbc, 0xa4, 0x04, 0x3e, + 0x3b, 0xc2, 0x95, 0x12, 0x42, 0xba, 0x65, 0x61, 0xdd, 0x31, 0x50, 0xde, 0xc4, 0x8c, 0x53, 0x5c, + 0xaa, 0x72, 0x64, 0x1a, 0xc4, 0x76, 0x6b, 0xad, 0x16, 0x32, 0x2b, 0x88, 0xe6, 0x6d, 0x62, 0x22, + 0x2b, 0xcf, 0x0f, 0x96, 0x5c, 0x4a, 0x38, 0x81, 0xcb, 0x71, 0xe8, 0x52, 0x07, 0xe8, 0x92, 0x80, + 0x66, 0xef, 0x9e, 
0x78, 0x32, 0xf1, 0x5b, 0x9b, 0x2f, 0xfb, 0xa0, 0x3f, 0xf4, 0x9b, 0x3d, 0x44, + 0x19, 0x26, 0x4e, 0xc8, 0x72, 0xb1, 0x42, 0x2a, 0x44, 0xfc, 0xcd, 0x07, 0xff, 0xc2, 0xd6, 0xff, + 0x1b, 0x84, 0xd9, 0x84, 0xbd, 0xa9, 0x75, 0xd4, 0x1e, 0x6a, 0x5d, 0xea, 0xdf, 0x53, 0x60, 0x66, + 0x9b, 0x55, 0x36, 0x28, 0xd2, 0x39, 0xda, 0x0e, 0x18, 0xe1, 0x16, 0x18, 0x37, 0x82, 0x47, 0x42, + 0x65, 0x69, 0x4e, 0x9a, 0x9f, 0x2c, 0xe6, 0x7d, 0x4f, 0xb9, 0xb0, 0xa7, 0x5b, 0xd8, 0xd4, 0x39, + 0xba, 0xa3, 0x52, 0xf4, 0x4d, 0x15, 0x53, 0x64, 0xaa, 0x7f, 0xfc, 0xbc, 0x78, 0x31, 0x24, 0xbb, + 0x6f, 0x9a, 0x14, 0x31, 0xb6, 0xc3, 0x29, 0x76, 0x2a, 0x5a, 0x1d, 0x0f, 0x97, 0xc1, 0xd9, 0x3d, + 0x6c, 0xca, 0xff, 0x9b, 0x93, 0xe6, 0x47, 0x8b, 0x39, 0xdf, 0x53, 0xb2, 0x0d, 0x9a, 0x0a, 0x47, + 0xeb, 0x37, 0x17, 0x2c, 0x8e, 0xd6, 0xd7, 0x56, 0x57, 0x57, 0x56, 0x55, 0x2d, 0x18, 0x1a, 0x20, + 0x5c, 0x6c, 0xca, 0x67, 0x7b, 0x43, 0xb8, 0xd8, 0x84, 0x45, 0x30, 0x65, 0xa2, 0x3d, 0x6c, 0xa0, + 0xe7, 0x87, 0x2e, 0xda, 0x32, 0xe5, 0x91, 0x76, 0xd0, 0xe5, 0x28, 0x34, 0x86, 0x81, 0xf7, 0x41, + 0xc6, 0xa5, 0xc4, 0xac, 0x1a, 0xfc, 0x89, 0x6e, 0x23, 0x79, 0x54, 0xb8, 0xad, 0xf8, 0x9e, 0x72, + 0xb5, 0xd5, 0xed, 0x05, 0x5b, 0x3f, 0x58, 0xbf, 0x59, 0xb8, 0xad, 0x6a, 0x51, 0x0c, 0xbc, 0x03, + 0xa6, 0xc2, 0xc7, 0xc7, 0x7a, 0x09, 0x59, 0xf2, 0x98, 0xe0, 0xb8, 0xec, 0x7b, 0x0a, 0x6c, 0x70, + 0x04, 0xd0, 0xc2, 0xea, 0x9a, 0xaa, 0xc5, 0xc6, 0xc2, 0x55, 0x00, 0x5c, 0x9d, 0xf2, 0x27, 0x55, + 0xbb, 0x84, 0xa8, 0x3c, 0x2e, 0x90, 0x97, 0x7c, 0x4f, 0x39, 0x1f, 0x47, 0xae, 0x14, 0x54, 0x2d, + 0x32, 0x10, 0x3e, 0x07, 0x57, 0x0c, 0x62, 0xdb, 0x98, 0x05, 0x01, 0x80, 0x9d, 0xca, 0x46, 0x95, + 0x71, 0x62, 0x3f, 0xb4, 0xc8, 0xbe, 0x3c, 0x21, 0x16, 0x21, 0xeb, 0x7b, 0xca, 0xe5, 0xa4, 0x45, + 0x28, 0xa8, 0x5a, 0x3b, 0x28, 0xfc, 0x51, 0x02, 0xd9, 0x36, 0x7d, 0x2f, 0xa8, 0x25, 0x4f, 0x0a, + 0xeb, 0x5e, 0xfb, 0x9e, 0xf2, 0xaa, 0x75, 0x6d, 0xde, 0xe0, 0xf2, 0xfa, 0x46, 0x32, 0x76, 0xae, + 0xb0, 0x40, 0x6c, 0xcc, 0x91, 0xed, 0xf2, 0xc3, 0x85, 
0x2a, 0xb5, 0x16, 0x18, 0xd7, 0x29, 0x67, + 0x0e, 0xe1, 0xfb, 0x98, 0xef, 0xae, 0xef, 0x72, 0xee, 0xde, 0x59, 0x38, 0x5e, 0x9f, 0x0e, 0x16, + 0xc0, 0xc7, 0xe0, 0xe3, 0x58, 0x6f, 0x10, 0xb5, 0x5b, 0x0e, 0xe6, 0x58, 0xb7, 0x76, 0x38, 0x72, + 0xd9, 0x26, 0x76, 0xb8, 0x0c, 0xe6, 0xa4, 0xf9, 0x69, 0xad, 0xfb, 0x40, 0xc8, 0xc0, 0x7c, 0xc7, + 0x41, 0x5b, 0x0e, 0xe3, 0xb4, 0x6a, 0x70, 0x4c, 0x1c, 0x39, 0x23, 0x7c, 0xbf, 0x12, 0x4f, 0x07, + 0x11, 0x0e, 0xcb, 0x85, 0x5b, 0xaa, 0xd6, 0x33, 0x11, 0x7c, 0x06, 0xae, 0xb7, 0x8c, 0xdd, 0x41, + 0x06, 0x71, 0x4c, 0x9d, 0x1e, 0x36, 0x9c, 0x98, 0x12, 0x4e, 0xf4, 0x32, 0x14, 0xee, 0x83, 0x1b, + 0x5d, 0x86, 0x45, 0x1d, 0x99, 0xee, 0xec, 0xc8, 0x09, 0xa8, 0xe0, 0x6b, 0x30, 0x5d, 0x65, 0x88, + 0x6e, 0xeb, 0x4e, 0x55, 0xb7, 0x82, 0x00, 0x99, 0x11, 0xdc, 0xb7, 0x7d, 0x4f, 0xb9, 0xd5, 0xe0, + 0x3e, 0xc1, 0xd6, 0xc7, 0xe9, 0xe0, 0x17, 0x00, 0xb0, 0xaa, 0xeb, 0x12, 0xca, 0x03, 0xf2, 0xd9, + 0x94, 0xe4, 0x11, 0xae, 0x80, 0x39, 0xcc, 0xc2, 0x80, 0xf9, 0x5c, 0x5a, 0xe6, 0x06, 0x17, 0x7c, + 0x06, 0xc6, 0x2c, 0x56, 0x0e, 0x58, 0xcf, 0xa7, 0x64, 0x0d, 0x79, 0xe0, 0x03, 0x30, 0xcd, 0x8c, + 0x5d, 0x64, 0xeb, 0x2f, 0x6b, 0x72, 0x2f, 0xc3, 0x20, 0x34, 0xba, 0xaa, 0x5c, 0x1c, 0x04, 0x8f, + 0xc0, 0xd5, 0xc6, 0xce, 0x22, 0xaa, 0x21, 0x9b, 0x70, 0xf4, 0x02, 0xd7, 0x53, 0xfb, 0x42, 0x4a, + 0x63, 0x3b, 0x91, 0xab, 0x32, 0xb8, 0x1c, 0xaf, 0x33, 0x1a, 0x62, 0x2e, 0x71, 0x18, 0x52, 0xbf, + 0xcb, 0x88, 0x12, 0xf4, 0xc2, 0x35, 0x4f, 0x6b, 0x09, 0xba, 0x1d, 0x2f, 0x1f, 0x23, 0xed, 0xa4, + 0xbf, 0x7b, 0xd5, 0x18, 0xed, 0xbb, 0x6a, 0x8c, 0xf5, 0x5a, 0x35, 0x0e, 0x3a, 0xca, 0xfb, 0x78, + 0xca, 0x18, 0xe8, 0x24, 0xdc, 0x27, 0x91, 0xda, 0x89, 0x41, 0x49, 0xed, 0xc9, 0x84, 0x71, 0x72, + 0x88, 0xc2, 0x08, 0x86, 0x29, 0x8c, 0x99, 0xa1, 0x09, 0xe3, 0xd4, 0x50, 0x84, 0x71, 0x7a, 0x40, + 0xc2, 0x78, 0x0f, 0x64, 0x2c, 0x56, 0xd6, 0xd0, 0x1e, 0x16, 0xb2, 0x38, 0xd3, 0xd3, 0xe1, 0x2f, + 0x0a, 0x69, 0x95, 0xd6, 0xd9, 0x21, 0x48, 0xeb, 0xb9, 0x21, 0x4a, 0x6b, 0x6f, 0x07, 0xa2, 
0xf3, + 0x3d, 0x1e, 0x88, 0x42, 0xa1, 0x8e, 0xa8, 0xf1, 0xb1, 0x50, 0xff, 0x2a, 0x09, 0xa1, 0x7e, 0x80, + 0x2c, 0x74, 0x3a, 0x85, 0x3a, 0xf4, 0x2d, 0xe2, 0xc0, 0xb1, 0x6f, 0xbf, 0x4c, 0x82, 0x4b, 0xf1, + 0xfa, 0x54, 0xdf, 0xd9, 0x53, 0x56, 0x8b, 0xe6, 0xc1, 0x2c, 0x23, 0x65, 0xbe, 0xaf, 0x53, 0x54, + 0x0f, 0xe8, 0x11, 0xb1, 0xf5, 0xcd, 0xcd, 0xf0, 0x39, 0xb8, 0xd4, 0xd4, 0x54, 0xb3, 0x37, 0x2c, + 0x42, 0x4d, 0xb3, 0xc5, 0xae, 0x3f, 0x6b, 0xb7, 0x54, 0x2d, 0x19, 0x0c, 0x37, 0xc1, 0xac, 0x61, + 0x86, 0x4d, 0x91, 0xd2, 0xd4, 0x3d, 0x29, 0x9b, 0x61, 0x70, 0x13, 0x5c, 0x28, 0x63, 0x6a, 0x07, + 0x53, 0x6c, 0x39, 0x65, 0x42, 0x6d, 0x5d, 0x48, 0xf4, 0x78, 0xbb, 0x12, 0xb9, 0x7a, 0xb3, 0xa0, + 0x6a, 0x49, 0x10, 0x58, 0x00, 0x17, 0x9b, 0x8c, 0x7d, 0x19, 0x20, 0x45, 0x91, 0x99, 0xd0, 0x12, + 0xfb, 0x02, 0xa9, 0x22, 0x5c, 0x6f, 0xdc, 0x78, 0x52, 0x48, 0x55, 0x8d, 0x07, 0x7e, 0x0e, 0x32, + 0x84, 0xeb, 0x0f, 0xb1, 0x85, 0x76, 0xf0, 0x11, 0x12, 0xe5, 0x60, 0xa4, 0x78, 0xdd, 0xf7, 0x14, + 0x25, 0xe1, 0x22, 0x25, 0x88, 0x9e, 0x0a, 0xa0, 0xaa, 0x45, 0x71, 0xf0, 0xb1, 0xa0, 0xd9, 0xd8, + 0x45, 0xc6, 0xd7, 0xac, 0x6a, 0x87, 0xc2, 0x7f, 0xc3, 0xf7, 0x94, 0x4f, 0x3a, 0xd3, 0x1c, 0x6f, + 0x5c, 0x14, 0x0e, 0xbf, 0x04, 0xb3, 0x91, 0xc7, 0xe0, 0x3a, 0x2c, 0x04, 0x7f, 0xb4, 0xb8, 0xe2, + 0x7b, 0x4a, 0xbe, 0x0b, 0x63, 0xeb, 0x1e, 0x36, 0x71, 0xc1, 0x22, 0xb8, 0x66, 0x63, 0xe7, 0xbe, + 0xeb, 0x5a, 0xd8, 0xd0, 0x4b, 0x16, 0xda, 0x69, 0x0a, 0xcd, 0x69, 0x11, 0x9a, 0x1d, 0xc7, 0xc0, + 0x23, 0x70, 0xcd, 0xd6, 0x0f, 0xda, 0x73, 0xcc, 0x08, 0xbd, 0x5e, 0xf3, 0x3d, 0xa5, 0x10, 0x0b, + 0x2f, 0x83, 0x95, 0x31, 0xb2, 0xcc, 0xf5, 0xed, 0x0e, 0xe4, 0xaa, 0xd6, 0x91, 0x1b, 0x96, 0xc0, + 0x2c, 0x45, 0x16, 0xd2, 0x19, 0x7a, 0x42, 0x38, 0x62, 0x83, 0xb8, 0x82, 0x34, 0x13, 0xb6, 0x16, + 0xa0, 0x73, 0x7d, 0x14, 0x20, 0x55, 0x01, 0x1f, 0x25, 0xea, 0xd7, 0xb1, 0xc2, 0xfd, 0x36, 0x26, + 0x14, 0x2e, 0x22, 0xec, 0x1f, 0xbc, 0xc2, 0xb5, 0xcb, 0xfb, 0xd1, 0x9e, 0xf2, 0x7e, 0x6c, 0x40, + 0x79, 0xdf, 0x2d, 0x07, 0xc6, 
0x7b, 0xc8, 0x81, 0x6f, 0xa5, 0x2e, 0x49, 0x30, 0x21, 0x62, 0xe6, + 0x9e, 0xef, 0x29, 0x77, 0x93, 0x8c, 0x1d, 0x5e, 0x3a, 0x4c, 0x0e, 0x3a, 0x1d, 0xe6, 0x12, 0x64, + 0x32, 0xae, 0x80, 0x9f, 0x26, 0x29, 0x60, 0xc2, 0xcd, 0xa7, 0x45, 0xec, 0x5a, 0x32, 0x6d, 0xaa, + 0xff, 0x4c, 0x6b, 0xcd, 0xa3, 0xe3, 0x4c, 0xfb, 0x57, 0x12, 0x99, 0x16, 0x39, 0x66, 0x7c, 0xe8, + 0x99, 0x16, 0xae, 0x49, 0xab, 0xc7, 0xf5, 0x35, 0x29, 0xfc, 0x33, 0x0e, 0xce, 0x6e, 0xb3, 0x0a, + 0xfc, 0x41, 0x02, 0x99, 0xe8, 0xcb, 0xe6, 0x7b, 0x4b, 0x27, 0x7d, 0xcf, 0xbe, 0x14, 0x97, 0xb9, + 0xec, 0x66, 0x5a, 0x86, 0xba, 0x8d, 0xc2, 0xb6, 0xe8, 0x5b, 0x88, 0xfe, 0x6c, 0x8b, 0x30, 0xf4, + 0x69, 0x5b, 0xc2, 0xd9, 0x5b, 0xd8, 0x16, 0x3d, 0x78, 0xf7, 0x67, 0x5b, 0x84, 0xa1, 0x4f, 0xdb, + 0x12, 0xce, 0xce, 0xf0, 0x27, 0x09, 0xc0, 0x84, 0x83, 0xf3, 0xa3, 0xb4, 0x1b, 0x13, 0x12, 0x65, + 0x9f, 0x0e, 0x88, 0x28, 0x66, 0x70, 0x42, 0x1d, 0x7c, 0x94, 0x76, 0xb7, 0xd2, 0x19, 0xdc, 0x5e, + 0x51, 0x84, 0xc1, 0x09, 0x72, 0xf2, 0x28, 0xed, 0x16, 0xa6, 0x33, 0xb8, 0x7d, 0xba, 0x17, 0xbf, + 0xfa, 0xfd, 0x5d, 0x4e, 0x7a, 0xfb, 0x2e, 0x27, 0xfd, 0xf5, 0x2e, 0x27, 0x7d, 0xff, 0x3e, 0x77, + 0xe6, 0xed, 0xfb, 0xdc, 0x99, 0x3f, 0xdf, 0xe7, 0xce, 0xbc, 0x7a, 0x58, 0xc1, 0x7c, 0xb7, 0x5a, + 0x5a, 0x32, 0x88, 0x9d, 0xaf, 0x4d, 0xba, 0x98, 0xf4, 0xcd, 0x6b, 0xb1, 0x31, 0xed, 0x62, 0xf8, + 0xd5, 0xeb, 0xa0, 0xfe, 0x89, 0xee, 0xd0, 0x45, 0xac, 0x34, 0x26, 0xbe, 0x5f, 0xad, 0xfc, 0x17, + 0x00, 0x00, 0xff, 0xff, 0x27, 0xcf, 0x77, 0x07, 0xe3, 0x1b, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. 
@@ -1394,7 +1396,7 @@ var _Msg_serviceDesc = grpc.ServiceDesc{ }, }, Streams: []grpc.StreamDesc{}, - Metadata: "model/tx.proto", + Metadata: "zigbeealliance/distributedcomplianceledger/model/tx.proto", } func (m *MsgCreateModel) Marshal() (dAtA []byte, err error) { diff --git a/x/model/types/vendor_products.pb.go b/x/model/types/vendor_products.pb.go index b49a67bf5..41bf1d9fb 100644 --- a/x/model/types/vendor_products.pb.go +++ b/x/model/types/vendor_products.pb.go @@ -1,11 +1,11 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: model/vendor_products.proto +// source: zigbeealliance/distributedcomplianceledger/model/vendor_products.proto package types import ( fmt "fmt" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -31,7 +31,7 @@ func (m *VendorProducts) Reset() { *m = VendorProducts{} } func (m *VendorProducts) String() string { return proto.CompactTextString(m) } func (*VendorProducts) ProtoMessage() {} func (*VendorProducts) Descriptor() ([]byte, []int) { - return fileDescriptor_1f57b2346708f393, []int{0} + return fileDescriptor_cccd801728ab4ad7, []int{0} } func (m *VendorProducts) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -78,25 +78,27 @@ func init() { proto.RegisterType((*VendorProducts)(nil), "zigbeealliance.distributedcomplianceledger.model.VendorProducts") } -func init() { proto.RegisterFile("model/vendor_products.proto", fileDescriptor_1f57b2346708f393) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/model/vendor_products.proto", fileDescriptor_cccd801728ab4ad7) +} -var fileDescriptor_1f57b2346708f393 = []byte{ - // 225 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xce, 0xcd, 0x4f, 0x49, - 0xcd, 0xd1, 0x2f, 0x4b, 0xcd, 0x4b, 0xc9, 0x2f, 0x8a, 0x2f, 0x28, 0xca, 0x4f, 0x29, 0x4d, 0x2e, - 0x29, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x32, 
0xa8, 0xca, 0x4c, 0x4f, 0x4a, 0x4d, 0x4d, - 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4b, 0xc9, 0x2c, 0x2e, 0x29, 0xca, 0x4c, 0x2a, - 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, 0xa7, 0x16, 0xe9, - 0x81, 0xcd, 0x91, 0x12, 0x86, 0x18, 0x07, 0x35, 0x07, 0x62, 0x8c, 0x52, 0x25, 0x17, 0x5f, 0x18, - 0xd8, 0xfc, 0x00, 0xa8, 0xf1, 0x42, 0x02, 0x5c, 0xcc, 0x65, 0x99, 0x29, 0x12, 0x8c, 0x0a, 0x8c, - 0x1a, 0xac, 0x41, 0x20, 0xa6, 0x50, 0x28, 0x17, 0x07, 0xcc, 0x72, 0x09, 0x26, 0x05, 0x66, 0x0d, - 0x6e, 0x23, 0x4b, 0x3d, 0x52, 0x6d, 0xd7, 0x83, 0x9a, 0x1f, 0x04, 0x37, 0xca, 0x29, 0xe1, 0xc4, - 0x23, 0x39, 0xc6, 0x0b, 0x8f, 0xe4, 0x18, 0x1f, 0x3c, 0x92, 0x63, 0x9c, 0xf0, 0x58, 0x8e, 0xe1, - 0xc2, 0x63, 0x39, 0x86, 0x1b, 0x8f, 0xe5, 0x18, 0xa2, 0xdc, 0xd2, 0x33, 0x4b, 0x32, 0x4a, 0x93, - 0xf4, 0x92, 0xf3, 0x73, 0xf5, 0x21, 0x16, 0xe9, 0xc2, 0x6c, 0xd2, 0x47, 0xb2, 0x49, 0x17, 0x61, - 0x95, 0x2e, 0xc4, 0x2e, 0xfd, 0x0a, 0x7d, 0x88, 0x27, 0x4b, 0x2a, 0x0b, 0x52, 0x8b, 0x93, 0xd8, - 0xc0, 0x7e, 0x34, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0x20, 0xa1, 0xf6, 0xb1, 0x49, 0x01, 0x00, - 0x00, +var fileDescriptor_cccd801728ab4ad7 = []byte{ + // 228 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x72, 0xab, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0xe7, 0xe6, 0xa7, 0xa4, 0xe6, 0xe8, 0x97, 0xa5, 0xe6, 0xa5, 0xe4, 0x17, 0xc5, + 0x17, 0x14, 0xe5, 0xa7, 0x94, 0x26, 0x97, 0x14, 0xeb, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0x19, + 0xa0, 0x9a, 0xa3, 0x87, 0xc7, 0x1c, 0x3d, 0xb0, 0x39, 0x52, 0x76, 0x24, 0xdb, 0x0c, 0xb5, 0x12, + 0x62, 0xa3, 0x52, 0x25, 0x17, 0x5f, 0x18, 0xd8, 0x29, 0x01, 0x50, 0x97, 0x08, 0x09, 0x70, 0x31, + 0x97, 0x65, 0xa6, 0x48, 0x30, 0x2a, 0x30, 0x6a, 0xb0, 0x06, 0x81, 0x98, 0x42, 0xa1, 
0x5c, 0x1c, + 0x30, 0x77, 0x4a, 0x30, 0x29, 0x30, 0x6b, 0x70, 0x1b, 0x59, 0xea, 0x91, 0xea, 0x50, 0x3d, 0xa8, + 0xf9, 0x41, 0x70, 0xa3, 0x9c, 0x12, 0x4e, 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, 0x8e, 0xf1, 0xc1, + 0x23, 0x39, 0xc6, 0x09, 0x8f, 0xe5, 0x18, 0x2e, 0x3c, 0x96, 0x63, 0xb8, 0xf1, 0x58, 0x8e, 0x21, + 0xca, 0x2d, 0x3d, 0xb3, 0x24, 0xa3, 0x34, 0x49, 0x2f, 0x39, 0x3f, 0x57, 0x1f, 0x62, 0x91, 0x2e, + 0x36, 0x0f, 0xea, 0x22, 0xac, 0xd2, 0x85, 0x7a, 0xb1, 0x02, 0xea, 0xc9, 0x92, 0xca, 0x82, 0xd4, + 0xe2, 0x24, 0x36, 0xb0, 0x1f, 0x8d, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0xf6, 0xc0, 0x77, 0xf1, + 0x9f, 0x01, 0x00, 0x00, } func (m *VendorProducts) Marshal() (dAtA []byte, err error) { diff --git a/x/pki/client/cli/query.go b/x/pki/client/cli/query.go index 514df8595..ace82b288 100644 --- a/x/pki/client/cli/query.go +++ b/x/pki/client/cli/query.go @@ -12,7 +12,7 @@ import ( ) // GetQueryCmd returns the cli query commands for this module. -func GetQueryCmd(queryRoute string) *cobra.Command { +func GetQueryCmd(_ string) *cobra.Command { // Group pki queries under a subcommand cmd := &cobra.Command{ Use: pkitypes.ModuleName, diff --git a/x/pki/client/cli/query_approved_certificates.go b/x/pki/client/cli/query_approved_certificates.go index 60b40a5e7..78341f962 100644 --- a/x/pki/client/cli/query_approved_certificates.go +++ b/x/pki/client/cli/query_approved_certificates.go @@ -16,7 +16,10 @@ func CmdListApprovedCertificates() *cobra.Command { Use: "all-x509-certs", Short: "Gets all certificates (root, intermediate and leaf)", RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } pageReq, err := client.ReadPageRequest(cmd.Flags()) if err != nil { @@ -59,7 +62,11 @@ func CmdShowApprovedCertificates() *cobra.Command { "by the given combination of subject and subject-key-id or just subject-key-id", Args: cobra.ExactArgs(0), RunE: func(cmd 
*cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } + if subject != "" { var res types.ApprovedCertificates diff --git a/x/pki/client/cli/query_approved_certificates_by_subject.go b/x/pki/client/cli/query_approved_certificates_by_subject.go index 3dd8ea378..02c015a4a 100644 --- a/x/pki/client/cli/query_approved_certificates_by_subject.go +++ b/x/pki/client/cli/query_approved_certificates_by_subject.go @@ -17,7 +17,11 @@ func CmdShowApprovedCertificatesBySubject() *cobra.Command { Short: "Gets all certificates (root, intermediate and leaf) associated with subject", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } + var res types.ApprovedCertificatesBySubject return cli.QueryWithProof( diff --git a/x/pki/client/cli/query_approved_certificates_by_subject_test.go b/x/pki/client/cli/query_approved_certificates_by_subject_test.go index b7b1f89f3..67ecda36e 100644 --- a/x/pki/client/cli/query_approved_certificates_by_subject_test.go +++ b/x/pki/client/cli/query_approved_certificates_by_subject_test.go @@ -8,7 +8,7 @@ import ( clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/nullify" "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/client/cli" diff --git a/x/pki/client/cli/query_approved_certificates_test.go b/x/pki/client/cli/query_approved_certificates_test.go index f9c1994e9..5b49d2d65 100644 --- a/x/pki/client/cli/query_approved_certificates_test.go +++ 
b/x/pki/client/cli/query_approved_certificates_test.go @@ -9,7 +9,7 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/nullify" "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/client/cli" diff --git a/x/pki/client/cli/query_approved_root_certificates.go b/x/pki/client/cli/query_approved_root_certificates.go index d15b3aeb0..a5f04fa0b 100644 --- a/x/pki/client/cli/query_approved_root_certificates.go +++ b/x/pki/client/cli/query_approved_root_certificates.go @@ -15,7 +15,10 @@ func CmdShowApprovedRootCertificates() *cobra.Command { Short: "Gets all approved root certificates", Args: cobra.NoArgs, RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } var res types.ApprovedRootCertificates return cli.QueryWithProofList( diff --git a/x/pki/client/cli/query_approved_root_certificates_test.go b/x/pki/client/cli/query_approved_root_certificates_test.go index 97cae8b2b..0c99dc059 100644 --- a/x/pki/client/cli/query_approved_root_certificates_test.go +++ b/x/pki/client/cli/query_approved_root_certificates_test.go @@ -7,7 +7,7 @@ import ( clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/nullify" "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/client/cli" diff --git 
a/x/pki/client/cli/query_child_certificates.go b/x/pki/client/cli/query_child_certificates.go index df8ee3bb5..2d29ffd3e 100644 --- a/x/pki/client/cli/query_child_certificates.go +++ b/x/pki/client/cli/query_child_certificates.go @@ -20,7 +20,10 @@ func CmdShowChildCertificates() *cobra.Command { Short: "Gets all child certificates for the given certificate", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } var res types.ChildCertificates return cli.QueryWithProof( diff --git a/x/pki/client/cli/query_child_certificates_test.go b/x/pki/client/cli/query_child_certificates_test.go index c70a60356..cfff7c983 100644 --- a/x/pki/client/cli/query_child_certificates_test.go +++ b/x/pki/client/cli/query_child_certificates_test.go @@ -8,7 +8,7 @@ import ( clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/nullify" "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/client/cli" diff --git a/x/pki/client/cli/query_pki_revocation_distribution_point.go b/x/pki/client/cli/query_pki_revocation_distribution_point.go index c8621a442..a61c1e22c 100644 --- a/x/pki/client/cli/query_pki_revocation_distribution_point.go +++ b/x/pki/client/cli/query_pki_revocation_distribution_point.go @@ -16,7 +16,10 @@ func CmdListPkiRevocationDistributionPoint() *cobra.Command { Use: "all-revocation-points", Short: "Gets all PKI revocation distribution points", RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + 
return err + } pageReq, err := client.ReadPageRequest(cmd.Flags()) if err != nil { @@ -59,7 +62,10 @@ func CmdShowPkiRevocationDistributionPoint() *cobra.Command { Short: "Gets a PKI revocation distribution point", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } var res types.PkiRevocationDistributionPoint diff --git a/x/pki/client/cli/query_pki_revocation_distribution_point_test.go b/x/pki/client/cli/query_pki_revocation_distribution_point_test.go index 9cd7fe817..dccaea226 100644 --- a/x/pki/client/cli/query_pki_revocation_distribution_point_test.go +++ b/x/pki/client/cli/query_pki_revocation_distribution_point_test.go @@ -9,7 +9,7 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" diff --git a/x/pki/client/cli/query_pki_revocation_distribution_points_by_issuer_subject_key_id.go b/x/pki/client/cli/query_pki_revocation_distribution_points_by_issuer_subject_key_id.go index 55eb9584e..198fad6c1 100644 --- a/x/pki/client/cli/query_pki_revocation_distribution_points_by_issuer_subject_key_id.go +++ b/x/pki/client/cli/query_pki_revocation_distribution_points_by_issuer_subject_key_id.go @@ -17,7 +17,10 @@ func CmdShowPkiRevocationDistributionPointsByIssuerSubjectKeyID() *cobra.Command Short: "Gets all revocation points associated with issuer's subject key id", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } var res 
types.PkiRevocationDistributionPointsByIssuerSubjectKeyID return cli.QueryWithProof( diff --git a/x/pki/client/cli/query_pki_revocation_distribution_points_by_issuer_subject_key_id_test.go b/x/pki/client/cli/query_pki_revocation_distribution_points_by_issuer_subject_key_id_test.go index 6e565979c..05559fd79 100644 --- a/x/pki/client/cli/query_pki_revocation_distribution_points_by_issuer_subject_key_id_test.go +++ b/x/pki/client/cli/query_pki_revocation_distribution_points_by_issuer_subject_key_id_test.go @@ -8,7 +8,7 @@ import ( clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" diff --git a/x/pki/client/cli/query_proposed_certificate.go b/x/pki/client/cli/query_proposed_certificate.go index 5208aafe9..c6657e9a6 100644 --- a/x/pki/client/cli/query_proposed_certificate.go +++ b/x/pki/client/cli/query_proposed_certificate.go @@ -16,8 +16,10 @@ func CmdListProposedCertificate() *cobra.Command { Use: "all-proposed-x509-root-certs", Short: "Gets all proposed but not approved root certificates", RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) - + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } pageReq, err := client.ReadPageRequest(cmd.Flags()) if err != nil { return err @@ -58,7 +60,10 @@ func CmdShowProposedCertificate() *cobra.Command { Short: "Gets a proposed but not approved root certificate with the given combination of subject and subject-key-id", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } var res types.ProposedCertificate return cli.QueryWithProof( diff --git 
a/x/pki/client/cli/query_proposed_certificate_revocation.go b/x/pki/client/cli/query_proposed_certificate_revocation.go index 68ccf31b1..0885dca1d 100644 --- a/x/pki/client/cli/query_proposed_certificate_revocation.go +++ b/x/pki/client/cli/query_proposed_certificate_revocation.go @@ -16,7 +16,10 @@ func CmdListProposedCertificateRevocation() *cobra.Command { Use: "all-proposed-x509-root-certs-to-revoke", Short: "Gets all proposed but not approved root certificates to be revoked", RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } pageReq, err := client.ReadPageRequest(cmd.Flags()) if err != nil { @@ -60,7 +63,10 @@ func CmdShowProposedCertificateRevocation() *cobra.Command { "with the given combination of subject and subject-key-id", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } var res types.ProposedCertificateRevocation return cli.QueryWithProof( diff --git a/x/pki/client/cli/query_proposed_certificate_revocation_test.go b/x/pki/client/cli/query_proposed_certificate_revocation_test.go index f4723b311..30f30a7b3 100644 --- a/x/pki/client/cli/query_proposed_certificate_revocation_test.go +++ b/x/pki/client/cli/query_proposed_certificate_revocation_test.go @@ -9,7 +9,7 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/nullify" "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/client/cli" diff --git 
a/x/pki/client/cli/query_proposed_certificate_test.go b/x/pki/client/cli/query_proposed_certificate_test.go index ae171d2c9..dc64814a8 100644 --- a/x/pki/client/cli/query_proposed_certificate_test.go +++ b/x/pki/client/cli/query_proposed_certificate_test.go @@ -9,7 +9,7 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/nullify" "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/client/cli" diff --git a/x/pki/client/cli/query_rejected_certificate.go b/x/pki/client/cli/query_rejected_certificate.go index 00381ff06..f40fc4c40 100644 --- a/x/pki/client/cli/query_rejected_certificate.go +++ b/x/pki/client/cli/query_rejected_certificate.go @@ -16,7 +16,10 @@ func CmdListRejectedCertificate() *cobra.Command { Use: "all-rejected-x509-root-certs", Short: "list all RejectedCertificate", RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } pageReq, err := client.ReadPageRequest(cmd.Flags()) if err != nil { @@ -55,7 +58,10 @@ func CmdShowRejectedCertificate() *cobra.Command { Short: "shows a RejectedCertificate", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } var res types.RejectedCertificate return cli.QueryWithProof( diff --git a/x/pki/client/cli/query_rejected_certificate_test.go b/x/pki/client/cli/query_rejected_certificate_test.go index c438d1152..9b30449c0 100644 --- 
a/x/pki/client/cli/query_rejected_certificate_test.go +++ b/x/pki/client/cli/query_rejected_certificate_test.go @@ -9,7 +9,7 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" diff --git a/x/pki/client/cli/query_revoked_certificates.go b/x/pki/client/cli/query_revoked_certificates.go index 05c8b4f18..17b7cb730 100644 --- a/x/pki/client/cli/query_revoked_certificates.go +++ b/x/pki/client/cli/query_revoked_certificates.go @@ -16,7 +16,10 @@ func CmdListRevokedCertificates() *cobra.Command { Use: "all-revoked-x509-certs", Short: "Gets all revoked certificates (root, intermediate and leaf)", RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } pageReq, err := client.ReadPageRequest(cmd.Flags()) if err != nil { @@ -59,7 +62,10 @@ func CmdShowRevokedCertificates() *cobra.Command { "by the given combination of subject and subject-key-id", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } var res types.RevokedCertificates return cli.QueryWithProof( diff --git a/x/pki/client/cli/query_revoked_certificates_test.go b/x/pki/client/cli/query_revoked_certificates_test.go index 9fe05caac..9565a1e12 100644 --- a/x/pki/client/cli/query_revoked_certificates_test.go +++ b/x/pki/client/cli/query_revoked_certificates_test.go @@ -9,7 +9,7 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli 
"github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/nullify" "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/client/cli" diff --git a/x/pki/client/cli/query_revoked_root_certificates.go b/x/pki/client/cli/query_revoked_root_certificates.go index 35b5613a9..b943694c1 100644 --- a/x/pki/client/cli/query_revoked_root_certificates.go +++ b/x/pki/client/cli/query_revoked_root_certificates.go @@ -15,7 +15,10 @@ func CmdShowRevokedRootCertificates() *cobra.Command { Short: "Gets all revoked root certificates", Args: cobra.NoArgs, RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } var res types.RevokedRootCertificates return cli.QueryWithProofList( diff --git a/x/pki/client/cli/query_revoked_root_certificates_test.go b/x/pki/client/cli/query_revoked_root_certificates_test.go index 4b679ec88..81aab2d97 100644 --- a/x/pki/client/cli/query_revoked_root_certificates_test.go +++ b/x/pki/client/cli/query_revoked_root_certificates_test.go @@ -7,7 +7,7 @@ import ( clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/nullify" "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/client/cli" diff --git a/x/pki/handler.go b/x/pki/handler.go index 8f1d04fe0..8f82eecce 100644 --- a/x/pki/handler.go +++ b/x/pki/handler.go @@ -3,10 +3,11 @@ package pki import ( "fmt" - pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" - + 
"cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + + pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/types" ) @@ -87,7 +88,7 @@ func NewHandler(k keeper.Keeper) sdk.Handler { default: errMsg := fmt.Sprintf("unrecognized %s message type: %T", pkitypes.ModuleName, msg) - return nil, sdkerrors.Wrap(sdkerrors.ErrUnknownRequest, errMsg) + return nil, errors.Wrap(sdkerrors.ErrUnknownRequest, errMsg) } } } diff --git a/x/pki/handler_update_revocation_test.go b/x/pki/handler_update_revocation_test.go index 9208f0cd7..0875b9532 100644 --- a/x/pki/handler_update_revocation_test.go +++ b/x/pki/handler_update_revocation_test.go @@ -628,6 +628,7 @@ func TestHandler_UpdatePkiRevocationDistributionPoint_PAI_VIDPID(t *testing.T) { } func compareUpdatedStringFields(t *testing.T, oldValue string, newValue string, updatedValue string) { + t.Helper() if newValue == "" { require.Equal(t, oldValue, updatedValue) } else { @@ -636,6 +637,7 @@ func compareUpdatedStringFields(t *testing.T, oldValue string, newValue string, } func compareUpdatedIntFields(t *testing.T, oldValue int, newValue int, updatedValue int) { + t.Helper() if newValue == 0 { require.Equal(t, oldValue, updatedValue) } else { diff --git a/x/pki/keeper/approved_root_certificates_test.go b/x/pki/keeper/approved_root_certificates_test.go index 159ee7962..7fa301ffa 100644 --- a/x/pki/keeper/approved_root_certificates_test.go +++ b/x/pki/keeper/approved_root_certificates_test.go @@ -11,7 +11,7 @@ import ( "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/types" ) -func createTestApprovedRootCertificates(keeper *keeper.Keeper, ctx sdk.Context) types.ApprovedRootCertificates { +func createTestApprovedRootCertificates(keeper *keeper.Keeper, ctx sdk.Context) 
types.ApprovedRootCertificates { //nolint:unparam item := types.ApprovedRootCertificates{} keeper.SetApprovedRootCertificates(ctx, item) diff --git a/x/pki/keeper/keeper.go b/x/pki/keeper/keeper.go index eaa27c9d0..312571cc9 100644 --- a/x/pki/keeper/keeper.go +++ b/x/pki/keeper/keeper.go @@ -4,9 +4,10 @@ import ( "fmt" "math" + "github.com/cometbft/cometbft/libs/log" "github.com/cosmos/cosmos-sdk/codec" + storetypes "github.com/cosmos/cosmos-sdk/store/types" sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/tendermint/tendermint/libs/log" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" authTypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/types" @@ -15,8 +16,8 @@ import ( type ( Keeper struct { cdc codec.BinaryCodec - storeKey sdk.StoreKey - memKey sdk.StoreKey + storeKey storetypes.StoreKey + memKey storetypes.StoreKey dclauthKeeper types.DclauthKeeper } @@ -26,7 +27,7 @@ type ( func NewKeeper( cdc codec.BinaryCodec, storeKey, - memKey sdk.StoreKey, + memKey storetypes.StoreKey, dclauthKeeper types.DclauthKeeper, ) *Keeper { diff --git a/x/pki/keeper/msg_server_approve_add_x_509_root_cert.go b/x/pki/keeper/msg_server_approve_add_x_509_root_cert.go index fff20b3c8..2d5a84cf0 100644 --- a/x/pki/keeper/msg_server_approve_add_x_509_root_cert.go +++ b/x/pki/keeper/msg_server_approve_add_x_509_root_cert.go @@ -3,6 +3,7 @@ package keeper import ( "context" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" @@ -14,12 +15,12 @@ func (k msgServer) ApproveAddX509RootCert(goCtx context.Context, msg *types.MsgA signerAddr, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, 
errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } // check if signer has root certificate approval role if !k.dclauthKeeper.HasRole(ctx, signerAddr, types.RootCertificateApprovalRole) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "MsgApproveAddX509RootCert transaction should be signed by an account with the \"%s\" role", types.RootCertificateApprovalRole, ) @@ -33,7 +34,7 @@ func (k msgServer) ApproveAddX509RootCert(goCtx context.Context, msg *types.MsgA // check if proposed certificate already has approval form signer if proposedCertificate.HasApprovalFrom(signerAddr.String()) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "Certificate associated with subject=%v and subjectKeyID=%v combination "+ "already has approval from=%v", msg.Subject, msg.SubjectKeyId, msg.Signer, diff --git a/x/pki/keeper/msg_server_approve_revoke_x_509_root_cert.go b/x/pki/keeper/msg_server_approve_revoke_x_509_root_cert.go index 42b55bfdf..6f4af725d 100644 --- a/x/pki/keeper/msg_server_approve_revoke_x_509_root_cert.go +++ b/x/pki/keeper/msg_server_approve_revoke_x_509_root_cert.go @@ -3,6 +3,7 @@ package keeper import ( "context" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" @@ -15,10 +16,10 @@ func (k msgServer) ApproveRevokeX509RootCert(goCtx context.Context, msg *types.M // check if signer has root certificate approval role signerAddr, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } if !k.dclauthKeeper.HasRole(ctx, signerAddr, types.RootCertificateApprovalRole) { - return nil, 
sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "MsgApproveRevokeX509RootCert transaction should be signed by "+ "an account with the \"%s\" role", types.RootCertificateApprovalRole, @@ -33,7 +34,7 @@ func (k msgServer) ApproveRevokeX509RootCert(goCtx context.Context, msg *types.M // check if proposed certificate revocation already has approval form signer if revocation.HasApprovalFrom(signerAddr.String()) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "Certificate revocation associated with subject=%v and subjectKeyID=%v combination "+ "already has approval from=%v", msg.Subject, msg.SubjectKeyId, msg.Signer, diff --git a/x/pki/keeper/msg_server_assign_vid.go b/x/pki/keeper/msg_server_assign_vid.go index baada16b4..ec7080b8a 100644 --- a/x/pki/keeper/msg_server_assign_vid.go +++ b/x/pki/keeper/msg_server_assign_vid.go @@ -4,6 +4,7 @@ import ( "context" "fmt" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" @@ -17,12 +18,12 @@ func (k msgServer) AssignVid(goCtx context.Context, msg *types.MsgAssignVid) (*t signerAddr, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } // check if signer has vendor admin role if !k.dclauthKeeper.HasRole(ctx, signerAddr, dclauthtypes.VendorAdmin) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "AssignVid transaction should be signed by an account with the \"%s\" role", dclauthtypes.VendorAdmin, ) } diff --git a/x/pki/keeper/msg_server_propose_add_x_509_root_cert.go b/x/pki/keeper/msg_server_propose_add_x_509_root_cert.go index 
7cbe1382e..7b93a58fe 100644 --- a/x/pki/keeper/msg_server_propose_add_x_509_root_cert.go +++ b/x/pki/keeper/msg_server_propose_add_x_509_root_cert.go @@ -3,6 +3,7 @@ package keeper import ( "context" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" @@ -15,12 +16,12 @@ func (k msgServer) ProposeAddX509RootCert(goCtx context.Context, msg *types.MsgP signerAddr, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } // check if sender has enough rights to propose a x509 root cert if !k.dclauthKeeper.HasRole(ctx, signerAddr, types.RootCertificateApprovalRole) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "MsgProposeAddX509RootCert transaction should be signed by an account with the %s role", types.RootCertificateApprovalRole, ) diff --git a/x/pki/keeper/msg_server_propose_revoke_x_509_root_cert.go b/x/pki/keeper/msg_server_propose_revoke_x_509_root_cert.go index 25e43d0a2..5606c4392 100644 --- a/x/pki/keeper/msg_server_propose_revoke_x_509_root_cert.go +++ b/x/pki/keeper/msg_server_propose_revoke_x_509_root_cert.go @@ -4,6 +4,7 @@ import ( "context" "fmt" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" @@ -16,10 +17,10 @@ func (k msgServer) ProposeRevokeX509RootCert(goCtx context.Context, msg *types.M // check if signer has root certificate approval role signerAddr, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, 
errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } if !k.dclauthKeeper.HasRole(ctx, signerAddr, types.RootCertificateApprovalRole) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "MsgProposeRevokeX509RootCert transaction should be signed by "+ "an account with the \"%s\" role", types.RootCertificateApprovalRole, diff --git a/x/pki/keeper/msg_server_reject_add_x_509_root_cert.go b/x/pki/keeper/msg_server_reject_add_x_509_root_cert.go index 28c7ae866..dbf5b2a50 100644 --- a/x/pki/keeper/msg_server_reject_add_x_509_root_cert.go +++ b/x/pki/keeper/msg_server_reject_add_x_509_root_cert.go @@ -3,6 +3,7 @@ package keeper import ( "context" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" @@ -14,12 +15,12 @@ func (k msgServer) RejectAddX509RootCert(goCtx context.Context, msg *types.MsgRe signerAddr, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } // check if signer has root certificate approval role if !k.dclauthKeeper.HasRole(ctx, signerAddr, types.RootCertificateApprovalRole) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "MsgApproveAddX509RootCert transaction should be signed by an account with the \"%s\" role", types.RootCertificateApprovalRole, ) @@ -33,7 +34,7 @@ func (k msgServer) RejectAddX509RootCert(goCtx context.Context, msg *types.MsgRe // check if proposed certificate already has reject approval form signer if proposedCertificate.HasRejectFrom(signerAddr.String()) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, 
"Certificate associated with subject=%v and subjectKeyID=%v combination "+ "already has reject approval from=%v", msg.Subject, msg.SubjectKeyId, msg.Signer, diff --git a/x/pki/keeper/msg_server_test.go b/x/pki/keeper/msg_server_test.go index e990b9394..528a05540 100644 --- a/x/pki/keeper/msg_server_test.go +++ b/x/pki/keeper/msg_server_test.go @@ -10,7 +10,7 @@ import ( "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/types" ) -//nolint:deadcode,unused +//nolint:unused func setupMsgServer(tb testing.TB) (types.MsgServer, context.Context) { tb.Helper() k, ctx := keepertest.PkiKeeper(tb, nil) diff --git a/x/pki/keeper/revoked_root_certificates_test.go b/x/pki/keeper/revoked_root_certificates_test.go index 719d28ea4..653c25676 100644 --- a/x/pki/keeper/revoked_root_certificates_test.go +++ b/x/pki/keeper/revoked_root_certificates_test.go @@ -11,7 +11,7 @@ import ( "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/types" ) -func createTestRevokedRootCertificates(keeper *keeper.Keeper, ctx sdk.Context) types.RevokedRootCertificates { +func createTestRevokedRootCertificates(keeper *keeper.Keeper, ctx sdk.Context) types.RevokedRootCertificates { //nolint:unparam item := types.RevokedRootCertificates{} keeper.SetRevokedRootCertificates(ctx, item) diff --git a/x/pki/module.go b/x/pki/module.go index 8044b600f..2d2fe2571 100644 --- a/x/pki/module.go +++ b/x/pki/module.go @@ -5,6 +5,7 @@ import ( "encoding/json" "fmt" + abci "github.com/cometbft/cometbft/abci/types" "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/codec" cdctypes "github.com/cosmos/cosmos-sdk/codec/types" @@ -13,7 +14,6 @@ import ( "github.com/gorilla/mux" "github.com/grpc-ecosystem/grpc-gateway/runtime" "github.com/spf13/cobra" - abci "github.com/tendermint/tendermint/abci/types" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" "github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/client/cli" 
"github.com/zigbee-alliance/distributed-compliance-ledger/x/pki/keeper" @@ -62,7 +62,7 @@ func (AppModuleBasic) DefaultGenesis(cdc codec.JSONCodec) json.RawMessage { } // ValidateGenesis performs genesis state validation for the pki module. -func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, config client.TxEncodingConfig, bz json.RawMessage) error { +func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, _ client.TxEncodingConfig, bz json.RawMessage) error { var genState types.GenesisState if err := cdc.UnmarshalJSON(bz, &genState); err != nil { return fmt.Errorf("failed to unmarshal %s genesis state: %w", pkitypes.ModuleName, err) @@ -72,7 +72,7 @@ func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, config client.TxEncod } // RegisterRESTRoutes registers the pki module's REST service handlers. -func (AppModuleBasic) RegisterRESTRoutes(clientCtx client.Context, rtr *mux.Router) { +func (AppModuleBasic) RegisterRESTRoutes(_ client.Context, _ *mux.Router) { } // RegisterGRPCGatewayRoutes registers the gRPC Gateway routes for the module. @@ -113,19 +113,6 @@ func (am AppModule) Name() string { return am.AppModuleBasic.Name() } -// Route returns the pki module's message routing key. -func (am AppModule) Route() sdk.Route { - return sdk.NewRoute(pkitypes.RouterKey, NewHandler(am.keeper)) -} - -// QuerierRoute returns the pki module's query routing key. -func (AppModule) QuerierRoute() string { return pkitypes.QuerierRoute } - -// LegacyQuerierHandler returns the pki module's Querier. -func (am AppModule) LegacyQuerierHandler(legacyQuerierCdc *codec.LegacyAmino) sdk.Querier { - return nil -} - // RegisterServices registers a GRPC query service to respond to the // module-specific GRPC queries. 
func (am AppModule) RegisterServices(cfg module.Configurator) { diff --git a/x/pki/module_simulation.go b/x/pki/module_simulation.go index b13082f41..d8ccf1755 100644 --- a/x/pki/module_simulation.go +++ b/x/pki/module_simulation.go @@ -4,7 +4,6 @@ import ( "math/rand" "github.com/cosmos/cosmos-sdk/baseapp" - simappparams "github.com/cosmos/cosmos-sdk/simapp/params" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/types/module" simtypes "github.com/cosmos/cosmos-sdk/types/simulation" @@ -19,7 +18,6 @@ import ( var ( _ = sample.AccAddress _ = pkisimulation.FindAccount - _ = simappparams.StakePerAccount _ = simulation.MsgEntryKind _ = baseapp.Paramspace ) @@ -105,15 +103,10 @@ func (AppModule) GenerateGenesisState(simState *module.SimulationState) { } // ProposalContents doesn't return any content functions for governance proposals. -func (AppModule) ProposalContents(_ module.SimulationState) []simtypes.WeightedProposalContent { +func (AppModule) ProposalContents(_ module.SimulationState) []simtypes.WeightedProposalContent { //nolint:staticcheck return nil } -// RandomizedParams creates randomized param changes for the simulator. -func (am AppModule) RandomizedParams(_ *rand.Rand) []simtypes.ParamChange { - return []simtypes.ParamChange{} -} - // RegisterStoreDecoder registers a decoder. 
func (am AppModule) RegisterStoreDecoder(_ sdk.StoreDecoderRegistry) {} diff --git a/x/pki/revocation_message_utils_test.go b/x/pki/revocation_message_utils_test.go index bc4cd8782..8f9e0b108 100644 --- a/x/pki/revocation_message_utils_test.go +++ b/x/pki/revocation_message_utils_test.go @@ -81,6 +81,7 @@ func createAddRevocationMessageWithLeafCertWithVid(signer string) *types.MsgAddP } func assertRevocationPointEqual(t *testing.T, expected *types.MsgAddPkiRevocationDistributionPoint, actual *types.PkiRevocationDistributionPoint) { + t.Helper() require.Equal(t, expected.CrlSignerCertificate, actual.CrlSignerCertificate) require.Equal(t, expected.CrlSignerCertificate, actual.CrlSignerCertificate) require.Equal(t, expected.DataDigest, actual.DataDigest) diff --git a/x/pki/simulation/add_pki_revocation_distribution_point.go b/x/pki/simulation/add_pki_revocation_distribution_point.go index 5dc6753ed..83344c657 100644 --- a/x/pki/simulation/add_pki_revocation_distribution_point.go +++ b/x/pki/simulation/add_pki_revocation_distribution_point.go @@ -12,7 +12,7 @@ import ( ) func SimulateMsgAddPkiRevocationDistributionPoint( - k keeper.Keeper, + _ keeper.Keeper, ) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { diff --git a/x/pki/simulation/add_x_509_cert.go b/x/pki/simulation/add_x_509_cert.go index 22e3a964b..ff3097e91 100644 --- a/x/pki/simulation/add_x_509_cert.go +++ b/x/pki/simulation/add_x_509_cert.go @@ -12,7 +12,7 @@ import ( ) func SimulateMsgAddX509Cert( - k keeper.Keeper, + _ keeper.Keeper, ) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { diff --git a/x/pki/simulation/approve_add_x_509_root_cert.go b/x/pki/simulation/approve_add_x_509_root_cert.go index 3dba479eb..381f2a79f 100644 
--- a/x/pki/simulation/approve_add_x_509_root_cert.go +++ b/x/pki/simulation/approve_add_x_509_root_cert.go @@ -12,7 +12,7 @@ import ( ) func SimulateMsgApproveAddX509RootCert( - k keeper.Keeper, + _ keeper.Keeper, ) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { diff --git a/x/pki/simulation/approve_revoke_x_509_root_cert.go b/x/pki/simulation/approve_revoke_x_509_root_cert.go index 5ff7dfa25..8f23f314e 100644 --- a/x/pki/simulation/approve_revoke_x_509_root_cert.go +++ b/x/pki/simulation/approve_revoke_x_509_root_cert.go @@ -12,7 +12,7 @@ import ( ) func SimulateMsgApproveRevokeX509RootCert( - k keeper.Keeper, + _ keeper.Keeper, ) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { diff --git a/x/pki/simulation/assign_vid.go b/x/pki/simulation/assign_vid.go index a6cffddde..3093fe433 100644 --- a/x/pki/simulation/assign_vid.go +++ b/x/pki/simulation/assign_vid.go @@ -12,7 +12,7 @@ import ( ) func SimulateMsgAssignVid( - k keeper.Keeper, + _ keeper.Keeper, ) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { diff --git a/x/pki/simulation/delete_pki_revocation_distribution_point.go b/x/pki/simulation/delete_pki_revocation_distribution_point.go index fc6daff09..63d85590e 100644 --- a/x/pki/simulation/delete_pki_revocation_distribution_point.go +++ b/x/pki/simulation/delete_pki_revocation_distribution_point.go @@ -12,7 +12,7 @@ import ( ) func SimulateMsgDeletePkiRevocationDistributionPoint( - k keeper.Keeper, + _ keeper.Keeper, ) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, 
) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { diff --git a/x/pki/simulation/propose_add_x_509_root_cert.go b/x/pki/simulation/propose_add_x_509_root_cert.go index e61784dd3..577a3a5aa 100644 --- a/x/pki/simulation/propose_add_x_509_root_cert.go +++ b/x/pki/simulation/propose_add_x_509_root_cert.go @@ -12,7 +12,7 @@ import ( ) func SimulateMsgProposeAddX509RootCert( - k keeper.Keeper, + _ keeper.Keeper, ) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { diff --git a/x/pki/simulation/propose_revoke_x_509_root_cert.go b/x/pki/simulation/propose_revoke_x_509_root_cert.go index 7590b97cc..89173a340 100644 --- a/x/pki/simulation/propose_revoke_x_509_root_cert.go +++ b/x/pki/simulation/propose_revoke_x_509_root_cert.go @@ -12,7 +12,7 @@ import ( ) func SimulateMsgProposeRevokeX509RootCert( - k keeper.Keeper, + _ keeper.Keeper, ) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { diff --git a/x/pki/simulation/reject_add_x_509_root_cert.go b/x/pki/simulation/reject_add_x_509_root_cert.go index 61577dc53..01d74cb72 100644 --- a/x/pki/simulation/reject_add_x_509_root_cert.go +++ b/x/pki/simulation/reject_add_x_509_root_cert.go @@ -12,7 +12,7 @@ import ( ) func SimulateMsgRejectAddX509RootCert( - k keeper.Keeper, + _ keeper.Keeper, ) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { diff --git a/x/pki/simulation/revoke_x_509_cert.go b/x/pki/simulation/revoke_x_509_cert.go index a9dfe353b..74f416835 100644 --- a/x/pki/simulation/revoke_x_509_cert.go +++ b/x/pki/simulation/revoke_x_509_cert.go @@ -12,7 +12,7 @@ import ( ) func 
SimulateMsgRevokeX509Cert( - k keeper.Keeper, + _ keeper.Keeper, ) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { diff --git a/x/pki/simulation/update_pki_revocation_distribution_point.go b/x/pki/simulation/update_pki_revocation_distribution_point.go index e00f459d3..af10edef4 100644 --- a/x/pki/simulation/update_pki_revocation_distribution_point.go +++ b/x/pki/simulation/update_pki_revocation_distribution_point.go @@ -12,7 +12,7 @@ import ( ) func SimulateMsgUpdatePkiRevocationDistributionPoint( - k keeper.Keeper, + _ keeper.Keeper, ) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { diff --git a/x/pki/types/approved_certificates.pb.go b/x/pki/types/approved_certificates.pb.go index 45912880f..d6cfac195 100644 --- a/x/pki/types/approved_certificates.pb.go +++ b/x/pki/types/approved_certificates.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: pki/approved_certificates.proto +// source: zigbeealliance/distributedcomplianceledger/pki/approved_certificates.proto package types import ( fmt "fmt" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -34,7 +34,7 @@ func (m *ApprovedCertificates) Reset() { *m = ApprovedCertificates{} } func (m *ApprovedCertificates) String() string { return proto.CompactTextString(m) } func (*ApprovedCertificates) ProtoMessage() {} func (*ApprovedCertificates) Descriptor() ([]byte, []int) { - return fileDescriptor_3ce3f4f36edbe99d, []int{0} + return fileDescriptor_6269eb4c76315a98, []int{0} } func (m *ApprovedCertificates) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -95,29 +95,31 @@ func init() { proto.RegisterType((*ApprovedCertificates)(nil), "zigbeealliance.distributedcomplianceledger.pki.ApprovedCertificates") } -func init() { proto.RegisterFile("pki/approved_certificates.proto", fileDescriptor_3ce3f4f36edbe99d) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/pki/approved_certificates.proto", fileDescriptor_6269eb4c76315a98) +} -var fileDescriptor_3ce3f4f36edbe99d = []byte{ - // 290 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x90, 0xbb, 0x4e, 0xc3, 0x30, - 0x14, 0x86, 0x6b, 0xca, 0x45, 0x18, 0xba, 0x44, 0x45, 0x8a, 0x3a, 0x98, 0xaa, 0x62, 0xc8, 0x12, - 0x5b, 0x82, 0x91, 0x89, 0xcb, 0x82, 0x98, 0xc8, 0xc0, 0xc0, 0x00, 0x4a, 0x9c, 0x43, 0x6a, 0x9a, - 0xd4, 0x96, 0xed, 0x20, 0xca, 0x53, 0xf0, 0x58, 0x2c, 0x48, 0x1d, 0x19, 0x51, 0xf2, 0x22, 0x28, - 0x71, 0x2b, 0xd2, 0x91, 0xed, 0xf7, 0x6f, 0xe9, 0x3b, 0xdf, 0x39, 0xf8, 0x58, 0xcd, 0x04, 0x8b, - 0x95, 0xd2, 0xf2, 0x15, 0xd2, 0x27, 0x0e, 0xda, 0x8a, 0x67, 0xc1, 0x63, 0x0b, 0x86, 0x2a, 0x2d, - 0xad, 0xf4, 0xe8, 0xbb, 0xc8, 0x12, 0x80, 0x38, 0xcf, 
0x45, 0x3c, 0xe7, 0x40, 0x53, 0x61, 0xac, - 0x16, 0x49, 0x69, 0x21, 0xe5, 0xb2, 0x50, 0xae, 0xcd, 0x21, 0xcd, 0x40, 0x53, 0x35, 0x13, 0xa3, - 0xa3, 0x06, 0xd8, 0xe1, 0x38, 0xcc, 0x68, 0x98, 0xc9, 0x4c, 0xb6, 0x91, 0x35, 0xc9, 0xb5, 0x93, - 0x2f, 0x84, 0x87, 0x17, 0xab, 0xe1, 0x57, 0x9d, 0xd9, 0x9e, 0x8f, 0xf7, 0x4c, 0x99, 0xbc, 0x00, - 0xb7, 0x3e, 0x1a, 0xa3, 0x60, 0x3f, 0x5a, 0x3f, 0xbd, 0x09, 0x3e, 0x5c, 0xc5, 0x5b, 0x58, 0xdc, - 0xa4, 0xfe, 0x56, 0xfb, 0xbd, 0xd1, 0x79, 0x77, 0x78, 0xa7, 0x31, 0x30, 0x7e, 0x7f, 0xdc, 0x0f, - 0x0e, 0x4e, 0xcf, 0xff, 0xb9, 0x03, 0xed, 0xa8, 0x44, 0x8e, 0xe4, 0x9d, 0xe0, 0x81, 0xe1, 0x53, - 0x28, 0xe2, 0x7b, 0xd0, 0x46, 0xc8, 0xb9, 0xbf, 0x3d, 0x46, 0xc1, 0x20, 0xda, 0x2c, 0x2f, 0x1f, - 0x3f, 0x2b, 0x82, 0x96, 0x15, 0x41, 0x3f, 0x15, 0x41, 0x1f, 0x35, 0xe9, 0x2d, 0x6b, 0xd2, 0xfb, - 0xae, 0x49, 0xef, 0xe1, 0x3a, 0x13, 0x76, 0x5a, 0x26, 0x94, 0xcb, 0x82, 0x39, 0x9b, 0x70, 0xad, - 0xc3, 0x3a, 0x3a, 0xe1, 0x9f, 0x4f, 0xe8, 0x84, 0xd8, 0x1b, 0x6b, 0x2e, 0x6a, 0x17, 0x0a, 0x4c, - 0xb2, 0xdb, 0x9e, 0xed, 0xec, 0x37, 0x00, 0x00, 0xff, 0xff, 0xa4, 0xbd, 0xbb, 0x30, 0xb6, 0x01, - 0x00, 0x00, +var fileDescriptor_6269eb4c76315a98 = []byte{ + // 294 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xf2, 0xaa, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0x17, 0x64, 0x67, 0xea, 0x27, 0x16, 0x14, 0x14, 0xe5, 0x97, 0xa5, 0xa6, 0xc4, + 0x27, 0xa7, 0x16, 0x95, 0x64, 0xa6, 0x65, 0x26, 0x27, 0x96, 0xa4, 0x16, 0xeb, 0x15, 0x14, 0xe5, + 0x97, 0xe4, 0x0b, 0xe9, 0xa1, 0x9a, 0xa5, 0x87, 0xc7, 0x2c, 0xbd, 0x82, 0xec, 0x4c, 0x29, 0x07, + 0x12, 0xed, 0x46, 0xb2, 0x12, 0x62, 0xa3, 0x94, 0x48, 0x7a, 0x7e, 0x7a, 0x3e, 0x98, 0xa9, 0x0f, + 0x62, 0x41, 0x44, 0x95, 0xce, 0x32, 0x72, 0x89, 0x38, 0x42, 0xdd, 0xe9, 0x8c, 
0xe4, 0x4c, 0x21, + 0x09, 0x2e, 0xf6, 0xe2, 0xd2, 0xa4, 0xac, 0xd4, 0xe4, 0x12, 0x09, 0x46, 0x05, 0x46, 0x0d, 0xce, + 0x20, 0x18, 0x57, 0x48, 0x89, 0x8b, 0x07, 0xca, 0xf4, 0x4e, 0xad, 0xf4, 0x4c, 0x91, 0x60, 0x02, + 0x4b, 0xa3, 0x88, 0x09, 0x05, 0x72, 0xb1, 0x82, 0x5c, 0x50, 0x2c, 0xc1, 0xac, 0xc0, 0xac, 0xc1, + 0x6d, 0x64, 0x4d, 0xa2, 0x77, 0xf5, 0x90, 0x9c, 0x12, 0x04, 0x31, 0x49, 0x48, 0x85, 0x8b, 0xb7, + 0x38, 0x39, 0x23, 0x35, 0x37, 0x31, 0x2c, 0xb5, 0xa8, 0x38, 0x33, 0x3f, 0x4f, 0x82, 0x45, 0x81, + 0x51, 0x83, 0x37, 0x08, 0x55, 0xd0, 0x29, 0xee, 0xc4, 0x23, 0x39, 0xc6, 0x0b, 0x8f, 0xe4, 0x18, + 0x1f, 0x3c, 0x92, 0x63, 0x9c, 0xf0, 0x58, 0x8e, 0xe1, 0xc2, 0x63, 0x39, 0x86, 0x1b, 0x8f, 0xe5, + 0x18, 0xa2, 0x5c, 0xd2, 0x33, 0x4b, 0x32, 0x4a, 0x93, 0xf4, 0x92, 0xf3, 0x73, 0xf5, 0x21, 0xae, + 0xd1, 0xc5, 0x16, 0x9a, 0xba, 0x08, 0xf7, 0xe8, 0x42, 0xc3, 0xb3, 0x02, 0x1c, 0xa2, 0x25, 0x95, + 0x05, 0xa9, 0xc5, 0x49, 0x6c, 0xe0, 0x60, 0x33, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0x35, 0xf8, + 0xa2, 0xad, 0x0c, 0x02, 0x00, 0x00, } func (m *ApprovedCertificates) Marshal() (dAtA []byte, err error) { diff --git a/x/pki/types/approved_certificates_by_subject.pb.go b/x/pki/types/approved_certificates_by_subject.pb.go index 545267e9f..85770a5ab 100644 --- a/x/pki/types/approved_certificates_by_subject.pb.go +++ b/x/pki/types/approved_certificates_by_subject.pb.go @@ -1,11 +1,11 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: pki/approved_certificates_by_subject.proto +// source: zigbeealliance/distributedcomplianceledger/pki/approved_certificates_by_subject.proto package types import ( fmt "fmt" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -31,7 +31,7 @@ func (m *ApprovedCertificatesBySubject) Reset() { *m = ApprovedCertifica func (m *ApprovedCertificatesBySubject) String() string { return proto.CompactTextString(m) } func (*ApprovedCertificatesBySubject) ProtoMessage() {} func (*ApprovedCertificatesBySubject) Descriptor() ([]byte, []int) { - return fileDescriptor_32862c9c0c83a774, []int{0} + return fileDescriptor_00fee9a70661a177, []int{0} } func (m *ApprovedCertificatesBySubject) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -79,26 +79,26 @@ func init() { } func init() { - proto.RegisterFile("pki/approved_certificates_by_subject.proto", fileDescriptor_32862c9c0c83a774) + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/pki/approved_certificates_by_subject.proto", fileDescriptor_00fee9a70661a177) } -var fileDescriptor_32862c9c0c83a774 = []byte{ - // 235 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xd2, 0x2a, 0xc8, 0xce, 0xd4, - 0x4f, 0x2c, 0x28, 0x28, 0xca, 0x2f, 0x4b, 0x4d, 0x89, 0x4f, 0x4e, 0x2d, 0x2a, 0xc9, 0x4c, 0xcb, - 0x4c, 0x4e, 0x2c, 0x49, 0x2d, 0x8e, 0x4f, 0xaa, 0x8c, 0x2f, 0x2e, 0x4d, 0xca, 0x4a, 0x4d, 0x2e, - 0xd1, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xd2, 0xab, 0xca, 0x4c, 0x4f, 0x4a, 0x4d, 0x4d, 0xcc, - 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4b, 0xc9, 0x2c, 0x2e, 0x29, 0xca, 0x4c, 0x2a, 0x2d, - 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, 0xa7, 0x16, 0xe9, 0x15, - 0x64, 0x67, 0x2a, 0xc5, 0x73, 0xc9, 0x3a, 0x42, 0x4d, 0x76, 0x46, 0x32, 0xd8, 0xa9, 0x32, 0x18, - 0x62, 0xac, 0x90, 0x04, 0x17, 0x3b, 0xd4, 0x06, 0x09, 0x46, 0x05, 0x46, 0x0d, 0xce, 0x20, 0x18, - 
0x57, 0x48, 0x85, 0x8b, 0x17, 0xca, 0xf4, 0x4e, 0xad, 0xf4, 0x4c, 0x29, 0x96, 0x60, 0x52, 0x60, - 0xd6, 0xe0, 0x0c, 0x42, 0x15, 0x74, 0x8a, 0x3b, 0xf1, 0x48, 0x8e, 0xf1, 0xc2, 0x23, 0x39, 0xc6, - 0x07, 0x8f, 0xe4, 0x18, 0x27, 0x3c, 0x96, 0x63, 0xb8, 0xf0, 0x58, 0x8e, 0xe1, 0xc6, 0x63, 0x39, - 0x86, 0x28, 0x97, 0xf4, 0xcc, 0x92, 0x8c, 0xd2, 0x24, 0xbd, 0xe4, 0xfc, 0x5c, 0x7d, 0x88, 0xab, - 0x75, 0x61, 0xce, 0xd6, 0x47, 0x72, 0xb6, 0x2e, 0xc2, 0xdd, 0xba, 0x10, 0x87, 0xeb, 0x57, 0xe8, - 0x83, 0x42, 0xa4, 0xa4, 0xb2, 0x20, 0xb5, 0x38, 0x89, 0x0d, 0xec, 0x6f, 0x63, 0x40, 0x00, 0x00, - 0x00, 0xff, 0xff, 0x46, 0x08, 0xcb, 0x07, 0x25, 0x01, 0x00, 0x00, +var fileDescriptor_00fee9a70661a177 = []byte{ + // 236 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x0a, 0xad, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0x17, 0x64, 0x67, 0xea, 0x27, 0x16, 0x14, 0x14, 0xe5, 0x97, 0xa5, 0xa6, 0xc4, + 0x27, 0xa7, 0x16, 0x95, 0x64, 0xa6, 0x65, 0x26, 0x27, 0x96, 0xa4, 0x16, 0xc7, 0x27, 0x55, 0xc6, + 0x17, 0x97, 0x26, 0x65, 0xa5, 0x26, 0x97, 0xe8, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0xe9, 0xa1, + 0x1a, 0xab, 0x87, 0xc7, 0x58, 0xbd, 0x82, 0xec, 0x4c, 0xa5, 0x78, 0x2e, 0x59, 0x47, 0xa8, 0xc9, + 0xce, 0x48, 0x06, 0x3b, 0x55, 0x06, 0x43, 0x8c, 0x15, 0x92, 0xe0, 0x62, 0x87, 0xda, 0x20, 0xc1, + 0xa8, 0xc0, 0xa8, 0xc1, 0x19, 0x04, 0xe3, 0x0a, 0xa9, 0x70, 0xf1, 0x42, 0x99, 0xde, 0xa9, 0x95, + 0x9e, 0x29, 0xc5, 0x12, 0x4c, 0x0a, 0xcc, 0x1a, 0x9c, 0x41, 0xa8, 0x82, 0x4e, 0x71, 0x27, 0x1e, + 0xc9, 0x31, 0x5e, 0x78, 0x24, 0xc7, 0xf8, 0xe0, 0x91, 0x1c, 0xe3, 0x84, 0xc7, 0x72, 0x0c, 0x17, + 0x1e, 0xcb, 0x31, 0xdc, 0x78, 0x2c, 0xc7, 0x10, 0xe5, 0x92, 0x9e, 0x59, 0x92, 0x51, 0x9a, 0xa4, + 0x97, 0x9c, 0x9f, 0xab, 0x0f, 0x71, 0xb5, 0x2e, 0xb6, 0xd0, 0xd0, 0x45, 
0xb8, 0x5b, 0x17, 0x1a, + 0x1e, 0x15, 0xe0, 0x10, 0x29, 0xa9, 0x2c, 0x48, 0x2d, 0x4e, 0x62, 0x03, 0xfb, 0xdb, 0x18, 0x10, + 0x00, 0x00, 0xff, 0xff, 0x96, 0xc1, 0xd5, 0x7d, 0x50, 0x01, 0x00, 0x00, } func (m *ApprovedCertificatesBySubject) Marshal() (dAtA []byte, err error) { diff --git a/x/pki/types/approved_certificates_by_subject_key_id.pb.go b/x/pki/types/approved_certificates_by_subject_key_id.pb.go index 03a9017b4..8da94db3d 100644 --- a/x/pki/types/approved_certificates_by_subject_key_id.pb.go +++ b/x/pki/types/approved_certificates_by_subject_key_id.pb.go @@ -1,11 +1,11 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: pki/approved_certificates_by_subject_key_id.proto +// source: zigbeealliance/distributedcomplianceledger/pki/approved_certificates_by_subject_key_id.proto package types import ( fmt "fmt" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -31,7 +31,7 @@ func (m *ApprovedCertificatesBySubjectKeyId) Reset() { *m = ApprovedCert func (m *ApprovedCertificatesBySubjectKeyId) String() string { return proto.CompactTextString(m) } func (*ApprovedCertificatesBySubjectKeyId) ProtoMessage() {} func (*ApprovedCertificatesBySubjectKeyId) Descriptor() ([]byte, []int) { - return fileDescriptor_3c174758e89e6673, []int{0} + return fileDescriptor_16abcf34f50089b9, []int{0} } func (m *ApprovedCertificatesBySubjectKeyId) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -79,28 +79,28 @@ func init() { } func init() { - proto.RegisterFile("pki/approved_certificates_by_subject_key_id.proto", fileDescriptor_3c174758e89e6673) + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/pki/approved_certificates_by_subject_key_id.proto", fileDescriptor_16abcf34f50089b9) } -var fileDescriptor_3c174758e89e6673 = []byte{ - // 258 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0xd0, 0x31, 0x4b, 0xc4, 0x30, - 
0x18, 0xc6, 0xf1, 0x46, 0x51, 0xb0, 0x3a, 0x15, 0x84, 0xc3, 0x21, 0x1c, 0x9d, 0x6e, 0x69, 0x8a, - 0x3a, 0x3a, 0x79, 0xba, 0x88, 0x93, 0xe7, 0xe6, 0x60, 0x69, 0x92, 0xd7, 0xfa, 0xda, 0x9e, 0x0d, - 0x49, 0x2a, 0xc6, 0x4f, 0xe1, 0xee, 0x17, 0x72, 0xbc, 0xd1, 0x51, 0xda, 0x2f, 0x22, 0x35, 0x8a, - 0x71, 0xbc, 0x31, 0xef, 0xf0, 0xff, 0x85, 0x27, 0x3e, 0x54, 0x35, 0xe6, 0xa5, 0x52, 0xba, 0x7d, - 0x02, 0x59, 0x08, 0xd0, 0x16, 0xef, 0x50, 0x94, 0x16, 0x4c, 0xc1, 0x5d, 0x61, 0x3a, 0xfe, 0x00, - 0xc2, 0x16, 0x35, 0xb8, 0x02, 0x25, 0x53, 0xba, 0xb5, 0x6d, 0xc2, 0x5e, 0xb0, 0xe2, 0x00, 0x65, - 0xd3, 0x60, 0xf9, 0x28, 0x80, 0x49, 0x34, 0x56, 0x23, 0xef, 0x2c, 0x48, 0xd1, 0x2e, 0x95, 0xbf, - 0x36, 0x20, 0x2b, 0xd0, 0x4c, 0xd5, 0x78, 0xb0, 0x3f, 0x12, 0x41, 0xd9, 0x67, 0xd2, 0x37, 0x12, - 0xa7, 0xa7, 0x3f, 0xf0, 0x59, 0xe0, 0xce, 0xdd, 0xb5, 0x57, 0x2f, 0xc1, 0x5d, 0xc8, 0x24, 0x8d, - 0xf7, 0x4c, 0xf0, 0x9e, 0x90, 0x29, 0x99, 0xed, 0x2c, 0xfe, 0xdd, 0x92, 0xab, 0x78, 0x6b, 0xec, - 0x9b, 0xc9, 0xc6, 0x74, 0x73, 0xb6, 0x7b, 0x74, 0xb2, 0xe6, 0x0f, 0x59, 0xc0, 0x2f, 0x7c, 0x69, - 0x7e, 0xfb, 0xde, 0x53, 0xb2, 0xea, 0x29, 0xf9, 0xec, 0x29, 0x79, 0x1d, 0x68, 0xb4, 0x1a, 0x68, - 0xf4, 0x31, 0xd0, 0xe8, 0xe6, 0xbc, 0x42, 0x7b, 0xdf, 0x71, 0x26, 0xda, 0x65, 0xee, 0x9d, 0xec, - 0x17, 0xca, 0x03, 0x28, 0xfb, 0x93, 0x32, 0x4f, 0xe5, 0xcf, 0xf9, 0xb8, 0x84, 0x75, 0x0a, 0x0c, - 0xdf, 0xfe, 0x1e, 0xe1, 0xf8, 0x2b, 0x00, 0x00, 0xff, 0xff, 0x29, 0xe4, 0x03, 0x2a, 0x80, 0x01, - 0x00, 0x00, +var fileDescriptor_16abcf34f50089b9 = []byte{ + // 263 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x8a, 0xa9, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0x17, 0x64, 0x67, 0xea, 0x27, 0x16, 0x14, 0x14, 0xe5, 0x97, 0xa5, 0xa6, 0xc4, + 0x27, 0xa7, 0x16, 0x95, 
0x64, 0xa6, 0x65, 0x26, 0x27, 0x96, 0xa4, 0x16, 0xc7, 0x27, 0x55, 0xc6, + 0x17, 0x97, 0x26, 0x65, 0xa5, 0x26, 0x97, 0xc4, 0x67, 0xa7, 0x56, 0xc6, 0x67, 0xa6, 0xe8, 0x15, + 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0xe9, 0xa1, 0x9a, 0xae, 0x87, 0xc7, 0x74, 0xbd, 0x82, 0xec, 0x4c, + 0x29, 0x07, 0x12, 0x5d, 0x83, 0xe4, 0x08, 0x88, 0x8d, 0x4a, 0xb3, 0x19, 0xb9, 0x94, 0x1c, 0xa1, + 0x6e, 0x74, 0x46, 0x72, 0xa2, 0x53, 0x65, 0x30, 0xc4, 0x81, 0xde, 0xa9, 0x95, 0x9e, 0x29, 0x42, + 0x4a, 0x5c, 0x3c, 0xc5, 0x48, 0x7c, 0x09, 0x46, 0x05, 0x46, 0x0d, 0xce, 0x20, 0x14, 0x31, 0xa1, + 0x40, 0x2e, 0x56, 0x90, 0xf9, 0xc5, 0x12, 0x4c, 0x0a, 0xcc, 0x1a, 0xdc, 0x46, 0xd6, 0x24, 0x7a, + 0x46, 0x0f, 0xc9, 0xfa, 0x20, 0x88, 0x49, 0x4e, 0x71, 0x27, 0x1e, 0xc9, 0x31, 0x5e, 0x78, 0x24, + 0xc7, 0xf8, 0xe0, 0x91, 0x1c, 0xe3, 0x84, 0xc7, 0x72, 0x0c, 0x17, 0x1e, 0xcb, 0x31, 0xdc, 0x78, + 0x2c, 0xc7, 0x10, 0xe5, 0x92, 0x9e, 0x59, 0x92, 0x51, 0x9a, 0xa4, 0x97, 0x9c, 0x9f, 0xab, 0x0f, + 0xb1, 0x47, 0x17, 0x5b, 0x28, 0xe8, 0x22, 0x6c, 0xd2, 0x85, 0x86, 0x43, 0x05, 0x38, 0x24, 0x4a, + 0x2a, 0x0b, 0x52, 0x8b, 0x93, 0xd8, 0xc0, 0x81, 0x60, 0x0c, 0x08, 0x00, 0x00, 0xff, 0xff, 0xf1, + 0x9e, 0xc0, 0x52, 0xd6, 0x01, 0x00, 0x00, } func (m *ApprovedCertificatesBySubjectKeyId) Marshal() (dAtA []byte, err error) { diff --git a/x/pki/types/approved_root_certificates.pb.go b/x/pki/types/approved_root_certificates.pb.go index 353614980..681227a83 100644 --- a/x/pki/types/approved_root_certificates.pb.go +++ b/x/pki/types/approved_root_certificates.pb.go @@ -1,11 +1,11 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: pki/approved_root_certificates.proto +// source: zigbeealliance/distributedcomplianceledger/pki/approved_root_certificates.proto package types import ( fmt "fmt" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -30,7 +30,7 @@ func (m *ApprovedRootCertificates) Reset() { *m = ApprovedRootCertificat func (m *ApprovedRootCertificates) String() string { return proto.CompactTextString(m) } func (*ApprovedRootCertificates) ProtoMessage() {} func (*ApprovedRootCertificates) Descriptor() ([]byte, []int) { - return fileDescriptor_ba0e460850872043, []int{0} + return fileDescriptor_43b429c1dbf61a0d, []int{0} } func (m *ApprovedRootCertificates) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -71,26 +71,26 @@ func init() { } func init() { - proto.RegisterFile("pki/approved_root_certificates.proto", fileDescriptor_ba0e460850872043) + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/pki/approved_root_certificates.proto", fileDescriptor_43b429c1dbf61a0d) } -var fileDescriptor_ba0e460850872043 = []byte{ - // 235 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0xcf, 0xbb, 0x4e, 0xc3, 0x30, - 0x14, 0xc6, 0xf1, 0x44, 0x08, 0x86, 0xb0, 0x75, 0xaa, 0x18, 0xac, 0x0a, 0x31, 0xb0, 0xc4, 0x96, - 0xe0, 0x09, 0xb8, 0x0d, 0xac, 0x1d, 0x41, 0x22, 0x8a, 0xed, 0x43, 0x38, 0x6a, 0xda, 0x63, 0xd9, - 0xa7, 0xdc, 0x9e, 0x82, 0xc7, 0x62, 0xec, 0xc8, 0x88, 0x92, 0x17, 0x41, 0xc6, 0x5c, 0xbc, 0x76, - 0xb4, 0x2d, 0xff, 0x7f, 0xfa, 0xaa, 0x23, 0xb7, 0x40, 0xd5, 0x3a, 0xe7, 0xe9, 0x11, 0x6c, 0xe3, - 0x89, 0xb8, 0x31, 0xe0, 0x19, 0xef, 0xd1, 0xb4, 0x0c, 0x41, 0x3a, 0x4f, 0x4c, 0x13, 0xf9, 0x8a, - 0x9d, 0x06, 0x68, 0xfb, 0x1e, 0xdb, 0x95, 0x01, 0x69, 0x31, 0xb0, 0x47, 0xbd, 0x66, 0xb0, 0x86, - 0x96, 0x2e, 0xdd, 0xf6, 0x60, 0x3b, 0xf0, 0xd2, 0x2d, 0xf0, 0x60, 0x16, 0xab, 0x59, 0xa7, 0x41, - 0x0b, 0xab, 0x78, 0x88, 0xef, 0xb1, 0x78, 
0xf8, 0x54, 0x4d, 0xcf, 0x7e, 0xd4, 0x39, 0x11, 0x5f, - 0x64, 0xe6, 0xe4, 0xb6, 0xda, 0x8d, 0x7f, 0xc3, 0xb4, 0x9c, 0xed, 0x1c, 0xef, 0x9f, 0x5c, 0x6d, - 0xa9, 0xcb, 0x2c, 0x76, 0xfd, 0xe7, 0xce, 0x53, 0xf3, 0xfc, 0xee, 0x7d, 0x10, 0xe5, 0x66, 0x10, - 0xe5, 0xe7, 0x20, 0xca, 0xb7, 0x51, 0x14, 0x9b, 0x51, 0x14, 0x1f, 0xa3, 0x28, 0x6e, 0x2e, 0x3b, - 0xe4, 0x87, 0xb5, 0x96, 0x86, 0x96, 0x2a, 0x89, 0xf5, 0x2f, 0xa9, 0x32, 0xb2, 0xfe, 0x37, 0xeb, - 0x84, 0xaa, 0x67, 0x15, 0xf7, 0xf2, 0x8b, 0x83, 0xa0, 0xf7, 0xbe, 0xf7, 0x9d, 0x7e, 0x05, 0x00, - 0x00, 0xff, 0xff, 0x24, 0xbc, 0x4b, 0x25, 0x59, 0x01, 0x00, 0x00, +var fileDescriptor_43b429c1dbf61a0d = []byte{ + // 238 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xf2, 0xaf, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0x17, 0x64, 0x67, 0xea, 0x27, 0x16, 0x14, 0x14, 0xe5, 0x97, 0xa5, 0xa6, 0xc4, + 0x17, 0xe5, 0xe7, 0x97, 0xc4, 0x27, 0xa7, 0x16, 0x95, 0x64, 0xa6, 0x65, 0x26, 0x27, 0x96, 0xa4, + 0x16, 0xeb, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0xe9, 0xa1, 0x1a, 0xa8, 0x87, 0xc7, 0x40, 0xbd, + 0x82, 0xec, 0x4c, 0x29, 0x6f, 0x12, 0x1d, 0x80, 0x64, 0x65, 0x7c, 0x66, 0x4a, 0x6a, 0x1e, 0x88, + 0x03, 0x32, 0x0a, 0x64, 0xb9, 0x52, 0x39, 0x97, 0x84, 0x23, 0xd4, 0x81, 0x41, 0xf9, 0xf9, 0x25, + 0xce, 0x48, 0xce, 0x13, 0x8a, 0xe6, 0x62, 0x05, 0xe9, 0x2d, 0x96, 0x60, 0x54, 0x60, 0xd6, 0xe0, + 0x36, 0x72, 0x25, 0xd1, 0xa1, 0x7a, 0x48, 0x86, 0x79, 0xc2, 0xed, 0x0d, 0x82, 0x98, 0xe9, 0x14, + 0x77, 0xe2, 0x91, 0x1c, 0xe3, 0x85, 0x47, 0x72, 0x8c, 0x0f, 0x1e, 0xc9, 0x31, 0x4e, 0x78, 0x2c, + 0xc7, 0x70, 0xe1, 0xb1, 0x1c, 0xc3, 0x8d, 0xc7, 0x72, 0x0c, 0x51, 0x2e, 0xe9, 0x99, 0x25, 0x19, + 0xa5, 0x49, 0x7a, 0xc9, 0xf9, 0xb9, 0xfa, 0x10, 0x1b, 0x75, 0xb1, 0xf9, 0x55, 0x17, 0x61, 0xa7, + 0x2e, 0xd4, 
0xb7, 0x15, 0x60, 0xff, 0x96, 0x54, 0x16, 0xa4, 0x16, 0x27, 0xb1, 0x81, 0xfd, 0x67, + 0x0c, 0x08, 0x00, 0x00, 0xff, 0xff, 0x86, 0xee, 0xc1, 0x7e, 0xaf, 0x01, 0x00, 0x00, } func (m *ApprovedRootCertificates) Marshal() (dAtA []byte, err error) { diff --git a/x/pki/types/certificate.pb.go b/x/pki/types/certificate.pb.go index f9611b06a..a0fc4f01f 100644 --- a/x/pki/types/certificate.pb.go +++ b/x/pki/types/certificate.pb.go @@ -1,13 +1,13 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: pki/certificate.proto +// source: zigbeealliance/distributedcomplianceledger/pki/certificate.proto package types import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -47,7 +47,7 @@ func (m *Certificate) Reset() { *m = Certificate{} } func (m *Certificate) String() string { return proto.CompactTextString(m) } func (*Certificate) ProtoMessage() {} func (*Certificate) Descriptor() ([]byte, []int) { - return fileDescriptor_2657e3d88fce7825, []int{0} + return fileDescriptor_0ec8ab7f7382ccfa, []int{0} } func (m *Certificate) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -192,43 +192,45 @@ func init() { proto.RegisterType((*Certificate)(nil), "zigbeealliance.distributedcomplianceledger.pki.Certificate") } -func init() { proto.RegisterFile("pki/certificate.proto", fileDescriptor_2657e3d88fce7825) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/pki/certificate.proto", fileDescriptor_0ec8ab7f7382ccfa) +} -var fileDescriptor_2657e3d88fce7825 = []byte{ - // 514 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x93, 0x41, 0x8b, 0xd3, 0x40, - 0x14, 0xc7, 0x1b, 0x6b, 0xdb, 0xed, 0x74, 0xbb, 0xbb, 0x0c, 0x55, 0xc6, 0x1e, 0x62, 0x28, 0x22, - 0x45, 0x6c, 0xa2, 0x2e, 0xf5, 
0x20, 0xec, 0x61, 0xab, 0x20, 0x22, 0xac, 0x90, 0x8a, 0x07, 0x0f, - 0xca, 0x24, 0x79, 0xa6, 0xe3, 0x26, 0x9d, 0x30, 0x33, 0xa9, 0x5b, 0x3f, 0x85, 0x57, 0xbf, 0x87, - 0x1f, 0xc2, 0xe3, 0xe2, 0xc9, 0x93, 0x48, 0xfb, 0x0d, 0xfc, 0x04, 0x92, 0x99, 0x94, 0xa6, 0x7a, - 0x12, 0x6f, 0xf9, 0xff, 0xde, 0xcb, 0x7b, 0xff, 0xf7, 0x1e, 0x83, 0xae, 0x65, 0xe7, 0xcc, 0x0b, - 0x41, 0x28, 0xf6, 0x8e, 0x85, 0x54, 0x81, 0x9b, 0x09, 0xae, 0x38, 0x76, 0x3f, 0xb2, 0x38, 0x00, - 0xa0, 0x49, 0xc2, 0xe8, 0x3c, 0x04, 0x37, 0x62, 0x52, 0x09, 0x16, 0xe4, 0x0a, 0xa2, 0x90, 0xa7, - 0x99, 0xa1, 0x09, 0x44, 0x31, 0x08, 0x37, 0x3b, 0x67, 0xfd, 0x1b, 0x21, 0x97, 0x29, 0x97, 0x6f, - 0xf5, 0xdf, 0x9e, 0x11, 0xa6, 0x54, 0xbf, 0x17, 0xf3, 0x98, 0x1b, 0x5e, 0x7c, 0x95, 0xf4, 0xb0, - 0xe8, 0x1b, 0x0b, 0x3a, 0x57, 0x06, 0x0c, 0x3e, 0x37, 0x50, 0xe7, 0xf1, 0xd6, 0x07, 0x26, 0xa8, - 0x95, 0x41, 0x5a, 0x10, 0x62, 0x39, 0xd6, 0xb0, 0xed, 0x6f, 0x24, 0x1e, 0xa0, 0x7d, 0x09, 0x82, - 0xd1, 0xe4, 0x2c, 0x4f, 0x03, 0x10, 0xe4, 0x8a, 0x0e, 0xef, 0x30, 0x7c, 0x1d, 0x35, 0x99, 0x94, - 0x39, 0x08, 0x52, 0xd7, 0xd1, 0x52, 0xe1, 0xdb, 0xe8, 0x80, 0xe6, 0x6a, 0xc6, 0x05, 0x53, 0xcb, - 0xe7, 0xb0, 0x7c, 0x16, 0x91, 0xab, 0x3a, 0xfe, 0x07, 0xc5, 0x0e, 0xea, 0x08, 0xce, 0xd5, 0x34, - 0x0f, 0xde, 0x43, 0xa8, 0x48, 0x43, 0x27, 0x55, 0x11, 0xbe, 0x83, 0x8e, 0x2a, 0xd2, 0xd4, 0x6a, - 0xea, 0xb4, 0xbf, 0xb8, 0x71, 0xe3, 0x73, 0xae, 0x48, 0xcb, 0xb1, 0x86, 0x7b, 0x7e, 0xa9, 0xb0, - 0x8b, 0x1a, 0xfc, 0xc3, 0x1c, 0x04, 0xd9, 0x2b, 0x7e, 0x9c, 0x90, 0x6f, 0x5f, 0x46, 0xbd, 0x72, - 0x77, 0xa7, 0x51, 0x24, 0x40, 0xca, 0xa9, 0x12, 0x6c, 0x1e, 0xfb, 0x26, 0xad, 0xd8, 0x89, 0x2c, - 0x1d, 0xb5, 0xcd, 0x4e, 0x4a, 0xa9, 0x77, 0x52, 0x75, 0x82, 0xca, 0x9d, 0x54, 0x5d, 0x4c, 0x51, - 0x9b, 0x66, 0x99, 0xe0, 0x0b, 0x9a, 0x48, 0xd2, 0x71, 0xea, 0xc3, 0xce, 0x83, 0xf1, 0x3f, 0xde, - 0xd9, 0x7d, 0x5a, 0x5c, 0xcc, 0xdf, 0xd6, 0xc1, 0xb7, 0x50, 0xb7, 0x6c, 0x72, 0x2a, 0x5f, 0xc2, - 0x85, 0x22, 0xfb, 0xba, 0xf3, 0x2e, 0xc4, 0x2f, 0x50, 0x4b, 0x40, 
0xa1, 0x25, 0xe9, 0xfe, 0x4f, - 0xe3, 0x4d, 0x15, 0x7c, 0x0f, 0xd5, 0x17, 0x2c, 0x22, 0x07, 0x8e, 0x35, 0x6c, 0x4c, 0xec, 0x5f, - 0x3f, 0x6e, 0xf6, 0x17, 0x34, 0x61, 0x11, 0x55, 0xf0, 0x68, 0x10, 0x2b, 0x38, 0xb9, 0x7f, 0x37, - 0x51, 0x70, 0xf2, 0x70, 0x3c, 0x3e, 0x1e, 0x0f, 0xfc, 0x22, 0x15, 0xf7, 0x50, 0x83, 0xc9, 0x33, - 0x1e, 0x92, 0x43, 0x7d, 0x02, 0x23, 0xb4, 0xfd, 0x70, 0x06, 0x29, 0x7d, 0x05, 0x42, 0x32, 0x3e, - 0x27, 0x47, 0x8e, 0x35, 0xec, 0xfa, 0xbb, 0x70, 0xf2, 0xe6, 0xeb, 0xca, 0xb6, 0x2e, 0x57, 0xb6, - 0xf5, 0x73, 0x65, 0x5b, 0x9f, 0xd6, 0x76, 0xed, 0x72, 0x6d, 0xd7, 0xbe, 0xaf, 0xed, 0xda, 0xeb, - 0x27, 0x31, 0x53, 0xb3, 0x3c, 0x70, 0x43, 0x9e, 0x7a, 0x66, 0xa2, 0xd1, 0x66, 0x24, 0xaf, 0x32, - 0xd2, 0x68, 0x3b, 0xd3, 0xc8, 0x0c, 0xe5, 0x5d, 0x78, 0xc5, 0x0b, 0x50, 0xcb, 0x0c, 0x64, 0xd0, - 0xd4, 0x4f, 0xe0, 0xf8, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0xf7, 0xca, 0x87, 0x41, 0x8d, 0x03, - 0x00, 0x00, +var fileDescriptor_0ec8ab7f7382ccfa = []byte{ + // 520 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x53, 0xd1, 0x8a, 0xd3, 0x4c, + 0x14, 0x6e, 0xfe, 0xfe, 0x6d, 0xb7, 0xd3, 0xed, 0xba, 0x0c, 0x45, 0xc6, 0x5e, 0xc4, 0x50, 0x44, + 0x8a, 0xd8, 0x44, 0x5d, 0xea, 0xc5, 0xc2, 0x82, 0x5b, 0x05, 0x11, 0x61, 0x85, 0x54, 0xbc, 0xf0, + 0x42, 0x99, 0x24, 0xc7, 0x74, 0xdc, 0x24, 0x13, 0x66, 0x26, 0x75, 0xeb, 0x53, 0x78, 0xeb, 0x7b, + 0xf8, 0x10, 0x5e, 0x2e, 0x5e, 0x79, 0x25, 0xd2, 0xbe, 0x81, 0x4f, 0x20, 0x99, 0xc9, 0xd2, 0x56, + 0x45, 0x58, 0xbc, 0x9b, 0xef, 0x3b, 0xe7, 0x7c, 0xe7, 0x9b, 0x33, 0x67, 0xd0, 0x83, 0xf7, 0x2c, + 0x0e, 0x00, 0x68, 0x92, 0x30, 0x9a, 0x85, 0xe0, 0x45, 0x4c, 0x2a, 0xc1, 0x82, 0x42, 0x41, 0x14, + 0xf2, 0x34, 0x37, 0x6c, 0x02, 0x51, 0x0c, 0xc2, 0xcb, 0x4f, 0x99, 0x17, 0x82, 0x50, 0xec, 0x0d, + 0x0b, 0xa9, 0x02, 0x37, 0x17, 0x5c, 0x71, 0xec, 0x6e, 0x2b, 0xb8, 0x7f, 0x51, 0x70, 0xf3, 0x53, + 0xd6, 0xbf, 0x16, 0x72, 0x99, 0x72, 0xf9, 0x5a, 0x57, 0x7b, 0x06, 0x18, 0xa9, 0x7e, 0x2f, 
0xe6, + 0x31, 0x37, 0x7c, 0x79, 0xaa, 0xd8, 0xc3, 0x4b, 0x5a, 0x8c, 0x05, 0xcd, 0x94, 0xa9, 0x1d, 0x7c, + 0x6c, 0xa0, 0xce, 0xc3, 0xb5, 0x65, 0x4c, 0x50, 0x2b, 0x87, 0xb4, 0x64, 0x88, 0xe5, 0x58, 0xc3, + 0xb6, 0x7f, 0x01, 0xf1, 0x00, 0xed, 0x4a, 0x10, 0x8c, 0x26, 0x27, 0x45, 0x1a, 0x80, 0x20, 0xff, + 0xe9, 0xf0, 0x16, 0x87, 0xaf, 0xa2, 0x26, 0x93, 0xb2, 0x00, 0x41, 0xea, 0x3a, 0x5a, 0x21, 0x7c, + 0x13, 0xed, 0xd1, 0x42, 0xcd, 0xb8, 0x60, 0x6a, 0xf1, 0x14, 0x16, 0x4f, 0x22, 0xf2, 0xbf, 0x8e, + 0xff, 0xc2, 0x62, 0x07, 0x75, 0x04, 0xe7, 0x6a, 0x5a, 0x04, 0x6f, 0x21, 0x54, 0xa4, 0xa1, 0x93, + 0x36, 0x29, 0x7c, 0x0b, 0xed, 0x6f, 0x40, 0xa3, 0xd5, 0xd4, 0x69, 0xbf, 0xf1, 0xc6, 0x8d, 0xcf, + 0xb9, 0x22, 0x2d, 0xc7, 0x1a, 0xee, 0xf8, 0x15, 0xc2, 0x2e, 0x6a, 0xf0, 0x77, 0x19, 0x08, 0xb2, + 0x53, 0x16, 0x4e, 0xc8, 0x97, 0x4f, 0xa3, 0x5e, 0x35, 0xe6, 0xe3, 0x28, 0x12, 0x20, 0xe5, 0x54, + 0x09, 0x96, 0xc5, 0xbe, 0x49, 0x2b, 0x67, 0x22, 0x2b, 0x47, 0x6d, 0x33, 0x93, 0x0a, 0xea, 0x99, + 0x6c, 0x3a, 0x41, 0xd5, 0x4c, 0x36, 0x5d, 0x4c, 0x51, 0x9b, 0xe6, 0xb9, 0xe0, 0x73, 0x9a, 0x48, + 0xd2, 0x71, 0xea, 0xc3, 0xce, 0xbd, 0xf1, 0x25, 0x57, 0xc2, 0x7d, 0x5c, 0xbe, 0x98, 0xbf, 0xd6, + 0xc1, 0x37, 0x50, 0xb7, 0x6a, 0x72, 0x2c, 0x9f, 0xc3, 0x99, 0x22, 0xbb, 0xba, 0xf3, 0x36, 0x89, + 0x9f, 0xa1, 0x96, 0x80, 0x12, 0x4b, 0xd2, 0xfd, 0x97, 0xc6, 0x17, 0x2a, 0xf8, 0x0e, 0xaa, 0xcf, + 0x59, 0x44, 0xf6, 0x1c, 0x6b, 0xd8, 0x98, 0xd8, 0x3f, 0xbe, 0x5d, 0xef, 0xcf, 0x69, 0xc2, 0x22, + 0xaa, 0xe0, 0x70, 0x10, 0x2b, 0x38, 0xba, 0x7b, 0x3b, 0x51, 0x70, 0x74, 0x7f, 0x3c, 0x3e, 0x18, + 0x0f, 0xfc, 0x32, 0x15, 0xf7, 0x50, 0x83, 0xc9, 0x13, 0x1e, 0x92, 0x2b, 0xfa, 0x09, 0x0c, 0xd0, + 0xf6, 0xc3, 0x19, 0xa4, 0xf4, 0x05, 0x08, 0xc9, 0x78, 0x46, 0xf6, 0x1d, 0x6b, 0xd8, 0xf5, 0xb7, + 0xc9, 0xc9, 0xab, 0xcf, 0x4b, 0xdb, 0x3a, 0x5f, 0xda, 0xd6, 0xf7, 0xa5, 0x6d, 0x7d, 0x58, 0xd9, + 0xb5, 0xf3, 0x95, 0x5d, 0xfb, 0xba, 0xb2, 0x6b, 0x2f, 0x1f, 0xc5, 0x4c, 0xcd, 0x8a, 0xc0, 0x0d, + 0x79, 0xea, 0x99, 0x1b, 0x8d, 
0xfe, 0xb4, 0xfd, 0xa3, 0xf5, 0x9d, 0x46, 0xd5, 0xfe, 0x9f, 0xe9, + 0x1f, 0xa0, 0x16, 0x39, 0xc8, 0xa0, 0xa9, 0xbf, 0xc0, 0xc1, 0xcf, 0x00, 0x00, 0x00, 0xff, 0xff, + 0xb1, 0xd8, 0xc8, 0xf6, 0xe3, 0x03, 0x00, 0x00, } func (m *Certificate) Marshal() (dAtA []byte, err error) { diff --git a/x/pki/types/certificate_identifier.pb.go b/x/pki/types/certificate_identifier.pb.go index 27e818a6e..5c5a22b9b 100644 --- a/x/pki/types/certificate_identifier.pb.go +++ b/x/pki/types/certificate_identifier.pb.go @@ -1,11 +1,11 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: pki/certificate_identifier.proto +// source: zigbeealliance/distributedcomplianceledger/pki/certificate_identifier.proto package types import ( fmt "fmt" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -31,7 +31,7 @@ func (m *CertificateIdentifier) Reset() { *m = CertificateIdentifier{} } func (m *CertificateIdentifier) String() string { return proto.CompactTextString(m) } func (*CertificateIdentifier) ProtoMessage() {} func (*CertificateIdentifier) Descriptor() ([]byte, []int) { - return fileDescriptor_e27460f91c3cafeb, []int{0} + return fileDescriptor_8d2f4acfd17683fc, []int{0} } func (m *CertificateIdentifier) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -78,24 +78,26 @@ func init() { proto.RegisterType((*CertificateIdentifier)(nil), "zigbeealliance.distributedcomplianceledger.pki.CertificateIdentifier") } -func init() { proto.RegisterFile("pki/certificate_identifier.proto", fileDescriptor_e27460f91c3cafeb) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/pki/certificate_identifier.proto", fileDescriptor_8d2f4acfd17683fc) +} -var fileDescriptor_e27460f91c3cafeb = []byte{ - // 216 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x28, 0xc8, 0xce, 0xd4, - 0x4f, 0x4e, 0x2d, 0x2a, 0xc9, 0x4c, 0xcb, 0x4c, 
0x4e, 0x2c, 0x49, 0x8d, 0xcf, 0x4c, 0x49, 0xcd, - 0x03, 0x71, 0x52, 0x8b, 0xf4, 0x0a, 0x8a, 0xf2, 0x4b, 0xf2, 0x85, 0xf4, 0xaa, 0x32, 0xd3, 0x93, - 0x52, 0x53, 0x13, 0x73, 0x72, 0x32, 0x13, 0xf3, 0x92, 0x53, 0xf5, 0x52, 0x32, 0x8b, 0x4b, 0x8a, - 0x32, 0x93, 0x4a, 0x4b, 0x52, 0x53, 0x92, 0xf3, 0x73, 0x0b, 0x20, 0xa2, 0x39, 0xa9, 0x29, 0xe9, - 0x20, 0x1d, 0xd9, 0x99, 0x4a, 0xa1, 0x5c, 0xa2, 0xce, 0x08, 0xf3, 0x3c, 0xe1, 0xc6, 0x09, 0x49, - 0x70, 0xb1, 0x17, 0x97, 0x26, 0x65, 0xa5, 0x26, 0x97, 0x48, 0x30, 0x2a, 0x30, 0x6a, 0x70, 0x06, - 0xc1, 0xb8, 0x42, 0x4a, 0x5c, 0x3c, 0x50, 0xa6, 0x77, 0x6a, 0xa5, 0x67, 0x8a, 0x04, 0x13, 0x58, - 0x1a, 0x45, 0xcc, 0x29, 0xee, 0xc4, 0x23, 0x39, 0xc6, 0x0b, 0x8f, 0xe4, 0x18, 0x1f, 0x3c, 0x92, - 0x63, 0x9c, 0xf0, 0x58, 0x8e, 0xe1, 0xc2, 0x63, 0x39, 0x86, 0x1b, 0x8f, 0xe5, 0x18, 0xa2, 0x5c, - 0xd2, 0x33, 0x4b, 0x32, 0x4a, 0x93, 0xf4, 0x92, 0xf3, 0x73, 0xf5, 0x21, 0x6e, 0xd5, 0x85, 0x39, - 0x56, 0x1f, 0xc9, 0xb1, 0xba, 0x08, 0xd7, 0xea, 0x42, 0x9c, 0xab, 0x5f, 0xa1, 0x0f, 0xf2, 0x7d, - 0x49, 0x65, 0x41, 0x6a, 0x71, 0x12, 0x1b, 0xd8, 0xb7, 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, - 0xc9, 0x5e, 0x10, 0xd7, 0x11, 0x01, 0x00, 0x00, +var fileDescriptor_8d2f4acfd17683fc = []byte{ + // 219 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xf2, 0xae, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0x17, 0x64, 0x67, 0xea, 0x27, 0xa7, 0x16, 0x95, 0x64, 0xa6, 0x65, 0x26, 0x27, + 0x96, 0xa4, 0xc6, 0x67, 0xa6, 0xa4, 0xe6, 0x81, 0x38, 0xa9, 0x45, 0x7a, 0x05, 0x45, 0xf9, 0x25, + 0xf9, 0x42, 0x7a, 0xa8, 0x86, 0xe9, 0xe1, 0x31, 0x4c, 0xaf, 0x20, 0x3b, 0x53, 0x29, 0x94, 0x4b, + 0xd4, 0x19, 0x61, 0x9e, 0x27, 0xdc, 0x38, 0x21, 0x09, 0x2e, 0xf6, 0xe2, 0xd2, 0xa4, 0xac, 0xd4, + 0xe4, 0x12, 0x09, 0x46, 0x05, 0x46, 
0x0d, 0xce, 0x20, 0x18, 0x57, 0x48, 0x89, 0x8b, 0x07, 0xca, + 0xf4, 0x4e, 0xad, 0xf4, 0x4c, 0x91, 0x60, 0x02, 0x4b, 0xa3, 0x88, 0x39, 0xc5, 0x9d, 0x78, 0x24, + 0xc7, 0x78, 0xe1, 0x91, 0x1c, 0xe3, 0x83, 0x47, 0x72, 0x8c, 0x13, 0x1e, 0xcb, 0x31, 0x5c, 0x78, + 0x2c, 0xc7, 0x70, 0xe3, 0xb1, 0x1c, 0x43, 0x94, 0x4b, 0x7a, 0x66, 0x49, 0x46, 0x69, 0x92, 0x5e, + 0x72, 0x7e, 0xae, 0x3e, 0xc4, 0xad, 0xba, 0xd8, 0x7c, 0xae, 0x8b, 0x70, 0xad, 0x2e, 0xd4, 0xef, + 0x15, 0x60, 0xdf, 0x97, 0x54, 0x16, 0xa4, 0x16, 0x27, 0xb1, 0x81, 0x7d, 0x6b, 0x0c, 0x08, 0x00, + 0x00, 0xff, 0xff, 0xaa, 0x25, 0x51, 0x60, 0x3c, 0x01, 0x00, 0x00, } func (m *CertificateIdentifier) Marshal() (dAtA []byte, err error) { diff --git a/x/pki/types/child_certificates.pb.go b/x/pki/types/child_certificates.pb.go index f49e021d6..be9654666 100644 --- a/x/pki/types/child_certificates.pb.go +++ b/x/pki/types/child_certificates.pb.go @@ -1,11 +1,11 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: pki/child_certificates.proto +// source: zigbeealliance/distributedcomplianceledger/pki/child_certificates.proto package types import ( fmt "fmt" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -32,7 +32,7 @@ func (m *ChildCertificates) Reset() { *m = ChildCertificates{} } func (m *ChildCertificates) String() string { return proto.CompactTextString(m) } func (*ChildCertificates) ProtoMessage() {} func (*ChildCertificates) Descriptor() ([]byte, []int) { - return fileDescriptor_1f81991ee339011f, []int{0} + return fileDescriptor_cda1dafd82862476, []int{0} } func (m *ChildCertificates) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -86,27 +86,30 @@ func init() { proto.RegisterType((*ChildCertificates)(nil), "zigbeealliance.distributedcomplianceledger.pki.ChildCertificates") } -func init() { proto.RegisterFile("pki/child_certificates.proto", fileDescriptor_1f81991ee339011f) } +func init() { + 
proto.RegisterFile("zigbeealliance/distributedcomplianceledger/pki/child_certificates.proto", fileDescriptor_cda1dafd82862476) +} -var fileDescriptor_1f81991ee339011f = []byte{ - // 270 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x29, 0xc8, 0xce, 0xd4, - 0x4f, 0xce, 0xc8, 0xcc, 0x49, 0x89, 0x4f, 0x4e, 0x2d, 0x2a, 0xc9, 0x4c, 0xcb, 0x4c, 0x4e, 0x2c, - 0x49, 0x2d, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xd2, 0xab, 0xca, 0x4c, 0x4f, 0x4a, 0x4d, - 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4b, 0xc9, 0x2c, 0x2e, 0x29, 0xca, 0x4c, - 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, 0xa7, 0x16, - 0xe9, 0x15, 0x64, 0x67, 0x4a, 0x29, 0x80, 0x4d, 0x43, 0x98, 0x13, 0x9f, 0x99, 0x92, 0x9a, 0x07, - 0xe2, 0x80, 0xe4, 0x41, 0x26, 0x2a, 0x6d, 0x61, 0xe4, 0x12, 0x74, 0x06, 0x59, 0xe7, 0x8c, 0x64, - 0x9b, 0x90, 0x18, 0x17, 0x5b, 0x66, 0x71, 0x71, 0x69, 0x6a, 0x91, 0x04, 0xa3, 0x02, 0xa3, 0x06, - 0x67, 0x10, 0x94, 0x27, 0xa4, 0xc6, 0xc5, 0x97, 0x58, 0x5a, 0x92, 0x91, 0x5f, 0x94, 0x59, 0x52, - 0xe9, 0x9d, 0x5a, 0xe9, 0x99, 0x22, 0xc1, 0x04, 0x96, 0x47, 0x13, 0x15, 0x8a, 0xe7, 0x62, 0x07, - 0xd9, 0xea, 0x99, 0x52, 0x2c, 0xc1, 0xac, 0xc0, 0xac, 0xc1, 0x6d, 0xe4, 0x4a, 0xa2, 0xcb, 0xf5, - 0x90, 0x9c, 0xe3, 0x09, 0x77, 0x73, 0x10, 0xcc, 0x54, 0xa7, 0xb8, 0x13, 0x8f, 0xe4, 0x18, 0x2f, - 0x3c, 0x92, 0x63, 0x7c, 0xf0, 0x48, 0x8e, 0x71, 0xc2, 0x63, 0x39, 0x86, 0x0b, 0x8f, 0xe5, 0x18, - 0x6e, 0x3c, 0x96, 0x63, 0x88, 0x72, 0x49, 0xcf, 0x2c, 0xc9, 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, - 0xd5, 0x87, 0xd8, 0xa9, 0x0b, 0xb3, 0x54, 0x1f, 0xc9, 0x52, 0x5d, 0x84, 0xad, 0xba, 0x10, 0x6b, - 0xf5, 0x2b, 0xf4, 0x41, 0xa1, 0x55, 0x52, 0x59, 0x90, 0x5a, 0x9c, 0xc4, 0x06, 0x0e, 0x1d, 0x63, - 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, 0xef, 0x80, 0x6d, 0x3d, 0x8f, 0x01, 0x00, 0x00, +var fileDescriptor_cda1dafd82862476 = []byte{ + // 273 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x72, 0xaf, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0x17, 0x64, 0x67, 0xea, 0x27, 0x67, 0x64, 0xe6, 0xa4, 0xc4, 0x27, 0xa7, 0x16, + 0x95, 0x64, 0xa6, 0x65, 0x26, 0x27, 0x96, 0xa4, 0x16, 0xeb, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, + 0xe9, 0xa1, 0x1a, 0xa4, 0x87, 0xc7, 0x20, 0xbd, 0x82, 0xec, 0x4c, 0x29, 0x6f, 0x52, 0x2d, 0x46, + 0x58, 0x19, 0x9f, 0x99, 0x92, 0x9a, 0x07, 0xe2, 0x80, 0x8c, 0x02, 0x59, 0xae, 0xb4, 0x85, 0x91, + 0x4b, 0xd0, 0x19, 0xe4, 0x32, 0x67, 0x24, 0x87, 0x09, 0x89, 0x71, 0xb1, 0x65, 0x16, 0x17, 0x97, + 0xa6, 0x16, 0x49, 0x30, 0x2a, 0x30, 0x6a, 0x70, 0x06, 0x41, 0x79, 0x42, 0x6a, 0x5c, 0x7c, 0x89, + 0xa5, 0x25, 0x19, 0xf9, 0x45, 0x99, 0x25, 0x95, 0xde, 0xa9, 0x95, 0x9e, 0x29, 0x12, 0x4c, 0x60, + 0x79, 0x34, 0x51, 0xa1, 0x78, 0x2e, 0x76, 0x90, 0xad, 0x9e, 0x29, 0xc5, 0x12, 0xcc, 0x0a, 0xcc, + 0x1a, 0xdc, 0x46, 0xae, 0x24, 0x7a, 0x52, 0x0f, 0xc9, 0x39, 0x9e, 0x70, 0x37, 0x07, 0xc1, 0x4c, + 0x75, 0x8a, 0x3b, 0xf1, 0x48, 0x8e, 0xf1, 0xc2, 0x23, 0x39, 0xc6, 0x07, 0x8f, 0xe4, 0x18, 0x27, + 0x3c, 0x96, 0x63, 0xb8, 0xf0, 0x58, 0x8e, 0xe1, 0xc6, 0x63, 0x39, 0x86, 0x28, 0x97, 0xf4, 0xcc, + 0x92, 0x8c, 0xd2, 0x24, 0xbd, 0xe4, 0xfc, 0x5c, 0x7d, 0x88, 0x9d, 0xba, 0xd8, 0x42, 0x4a, 0x17, + 0x61, 0xab, 0x2e, 0x34, 0xac, 0x2a, 0xc0, 0xa1, 0x55, 0x52, 0x59, 0x90, 0x5a, 0x9c, 0xc4, 0x06, + 0x0e, 0x1d, 0x63, 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, 0x70, 0xb7, 0x6a, 0x73, 0xe5, 0x01, 0x00, + 0x00, } func (m *ChildCertificates) Marshal() (dAtA []byte, err error) { diff --git a/x/pki/types/codec.go b/x/pki/types/codec.go index 46b19d0ef..a7c273a38 100644 --- a/x/pki/types/codec.go +++ b/x/pki/types/codec.go @@ -78,7 +78,7 @@ func RegisterInterfaces(registry cdctypes.InterfaceRegistry) { ) // this line is used by starport scaffolding # 3 - 
msgservice.RegisterMsgServiceDesc(registry, &_Msg_serviceDesc) + msgservice.RegisterMsgServiceDesc(registry, &_Msg_serviceDesc) //nolint:nosnakecase } var ModuleCdc = codec.NewProtoCodec(cdctypes.NewInterfaceRegistry()) diff --git a/x/pki/types/genesis.pb.go b/x/pki/types/genesis.pb.go index 36ceea4fb..1eec71bdc 100644 --- a/x/pki/types/genesis.pb.go +++ b/x/pki/types/genesis.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: pki/genesis.proto +// source: zigbeealliance/distributedcomplianceledger/pki/genesis.proto package types import ( fmt "fmt" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -48,7 +48,7 @@ func (m *GenesisState) Reset() { *m = GenesisState{} } func (m *GenesisState) String() string { return proto.CompactTextString(m) } func (*GenesisState) ProtoMessage() {} func (*GenesisState) Descriptor() ([]byte, []int) { - return fileDescriptor_9478608499b59120, []int{0} + return fileDescriptor_f53eb28747bee8a8, []int{0} } func (m *GenesisState) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -200,60 +200,63 @@ func init() { proto.RegisterType((*GenesisState)(nil), "zigbeealliance.distributedcomplianceledger.pki.GenesisState") } -func init() { proto.RegisterFile("pki/genesis.proto", fileDescriptor_9478608499b59120) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/pki/genesis.proto", fileDescriptor_f53eb28747bee8a8) +} -var fileDescriptor_9478608499b59120 = []byte{ - // 796 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x96, 0x3f, 0x6f, 0xd3, 0x40, - 0x18, 0xc6, 0x73, 0xb4, 0x14, 0x70, 0xcb, 0x9f, 0x5a, 0x05, 0xaa, 0x0a, 0xb9, 0xa5, 0x45, 0x50, - 0x40, 0x4d, 0x44, 0x19, 0x98, 0x93, 0x06, 0x95, 0xf2, 0xa7, 0x54, 0xa9, 0x60, 0x60, 0xc0, 0x72, - 0xce, 0x47, 
0x7a, 0x38, 0xf5, 0x19, 0xfb, 0x52, 0x11, 0x24, 0x24, 0x10, 0x08, 0x31, 0x32, 0xb2, - 0x22, 0x21, 0xb1, 0xf2, 0x31, 0x3a, 0x76, 0x64, 0x42, 0xa8, 0xf9, 0x22, 0xc8, 0xaf, 0xcf, 0x8d, - 0x63, 0xdf, 0xa5, 0x89, 0x95, 0x2d, 0xb1, 0xcf, 0xcf, 0xf3, 0xbb, 0xf7, 0x7d, 0x5e, 0x9f, 0xb5, - 0x69, 0xcf, 0xa1, 0xa5, 0x06, 0x71, 0x49, 0x40, 0x83, 0xa2, 0xe7, 0x33, 0xce, 0xf4, 0xe2, 0x3b, - 0xda, 0xa8, 0x13, 0x62, 0x35, 0x9b, 0xd4, 0x72, 0x31, 0x29, 0xda, 0x34, 0xe0, 0x3e, 0xad, 0xb7, - 0x38, 0xb1, 0x31, 0xdb, 0xf5, 0xa2, 0xab, 0x4d, 0x62, 0x37, 0x88, 0x5f, 0xf4, 0x1c, 0x3a, 0x37, - 0x1f, 0x4a, 0x58, 0x9e, 0xe7, 0xb3, 0x3d, 0x62, 0x9b, 0x98, 0xf8, 0x9c, 0xbe, 0xa2, 0xd8, 0xe2, - 0x44, 0x08, 0xce, 0x19, 0xe1, 0x02, 0xcf, 0x67, 0x1e, 0x0b, 0x7a, 0x17, 0x88, 0xfb, 0x57, 0xc2, - 0xfb, 0x78, 0x87, 0x36, 0xa5, 0x4f, 0xdf, 0x54, 0x3d, 0x6d, 0xfa, 0x64, 0x8f, 0x61, 0x8b, 0x53, - 0xe6, 0x26, 0x8d, 0xc2, 0xab, 0x8e, 0x1c, 0x04, 0x8c, 0x5a, 0x2e, 0x7d, 0xd3, 0x22, 0x12, 0x8c, - 0x6b, 0x3d, 0xfb, 0xf0, 0x19, 0xe3, 0x32, 0x8d, 0xa5, 0xa4, 0x87, 0x6a, 0xd1, 0x2d, 0x65, 0x49, - 0xcc, 0x7a, 0xdb, 0x0c, 0x5a, 0xf5, 0xd7, 0x04, 0xf3, 0x5e, 0xe8, 0xf0, 0x8a, 0xb4, 0x3a, 0xb7, - 0x61, 0xff, 0x0e, 0x4d, 0x6c, 0xd7, 0x3c, 0x6a, 0x49, 0xf8, 0xc7, 0x63, 0xd4, 0x8d, 0xc5, 0xee, - 0x0f, 0xb4, 0x18, 0x28, 0x68, 0x10, 0xb4, 0x88, 0x1f, 0xc3, 0x98, 0x0e, 0x69, 0x9b, 0xd4, 0x16, - 0x32, 0x77, 0x06, 0xe1, 0xef, 0x7d, 0x04, 0x52, 0xe0, 0x32, 0xac, 0xac, 0x89, 0x11, 0x2f, 0xa0, - 0xd8, 0x92, 0xdd, 0xbf, 0x91, 0x2c, 0x6c, 0x3f, 0xa1, 0x92, 0xd2, 0x29, 0x84, 0xdb, 0xa3, 0xb6, - 0x69, 0xb9, 0xb6, 0x19, 0x38, 0x47, 0x68, 0x33, 0x0d, 0xd6, 0x60, 0xf0, 0xb3, 0x14, 0xfe, 0x8a, - 0xae, 0x2e, 0x7e, 0x9d, 0xd1, 0xa6, 0xd6, 0xa3, 0xe0, 0x6f, 0x73, 0x8b, 0x13, 0xfd, 0x0b, 0xd2, - 0x66, 0xe3, 0x3d, 0xaf, 0x25, 0x54, 0x1f, 0xd3, 0x80, 0xcf, 0xa2, 0x85, 0xb1, 0xe5, 0xc9, 0xd5, - 0xea, 0x90, 0xb3, 0x51, 0x2c, 0x4b, 0xf4, 0x2a, 0xe3, 0xfb, 0x7f, 0xe7, 0x0b, 0x35, 0xa5, 0x97, - 0xfe, 0x09, 0x69, 0x97, 0xe3, 0xc0, 0x27, 0x6e, 
0x02, 0xc7, 0x09, 0xe0, 0x58, 0x1b, 0x96, 0x63, - 0x2b, 0x2b, 0x27, 0x30, 0x54, 0x4e, 0xfa, 0x7b, 0xed, 0x22, 0xcc, 0x64, 0xa6, 0x14, 0x63, 0x80, - 0x50, 0x1e, 0x16, 0x61, 0x2d, 0x2d, 0x26, 0x00, 0xe4, 0x2e, 0xfa, 0x0f, 0xa4, 0x5d, 0x95, 0xa0, - 0xd5, 0x8e, 0x72, 0x0d, 0x2c, 0xe3, 0xc0, 0xf2, 0x64, 0x04, 0xe5, 0xe8, 0x0a, 0x0b, 0xae, 0xe3, - 0xdd, 0xa1, 0x51, 0x22, 0xb1, 0x99, 0x2a, 0x9d, 0xcc, 0xd7, 0xa8, 0x5a, 0x56, 0x2e, 0x6e, 0x94, - 0xc2, 0x29, 0x6c, 0x54, 0xf4, 0x4e, 0x4b, 0x67, 0x65, 0x22, 0x5f, 0xa3, 0x9e, 0xa5, 0xc5, 0xe2, - 0x46, 0x49, 0x5d, 0xf4, 0xcf, 0x89, 0xb1, 0xa9, 0x31, 0xc6, 0x93, 0x7c, 0xb3, 0xa7, 0x16, 0xd0, - 0xf2, 0xe4, 0xea, 0x83, 0xbc, 0x63, 0x93, 0xd6, 0xab, 0x29, 0x9d, 0xf4, 0x8f, 0xdd, 0x5e, 0x64, - 0x28, 0x4e, 0x03, 0xc5, 0x7a, 0xce, 0x5e, 0x64, 0x20, 0x54, 0x3e, 0x90, 0x59, 0xd9, 0x54, 0x57, - 0xda, 0xdb, 0xd1, 0x2b, 0x13, 0xda, 0x72, 0x26, 0x5f, 0x66, 0xcb, 0xfd, 0x84, 0xe3, 0xcc, 0x1e, - 0xeb, 0x2e, 0x32, 0x1b, 0x9d, 0x36, 0xe9, 0xc0, 0x68, 0x79, 0x33, 0x9b, 0x91, 0xeb, 0x66, 0x56, - 0xea, 0xa4, 0xff, 0x44, 0xda, 0xe2, 0x96, 0x43, 0xbb, 0xf3, 0x54, 0x4d, 0x1c, 0x52, 0x5b, 0xe1, - 0x19, 0x05, 0x40, 0x93, 0x00, 0xb4, 0x39, 0xf4, 0x78, 0xf7, 0x55, 0x16, 0x6c, 0x03, 0xf8, 0xeb, - 0x1d, 0xa4, 0xdd, 0xeb, 0xbf, 0x2c, 0xa8, 0xb4, 0x37, 0xe0, 0x20, 0x15, 0x35, 0x7e, 0x44, 0xda, - 0x1b, 0x55, 0x60, 0x9f, 0x02, 0x76, 0x3c, 0x5a, 0x76, 0xa9, 0x9d, 0xd8, 0x50, 0x5e, 0x52, 0xfd, - 0x37, 0xd2, 0xae, 0xf7, 0x0d, 0x4e, 0xb8, 0xd4, 0x86, 0x4d, 0x9d, 0x85, 0x4d, 0xd5, 0x46, 0x9a, - 0x5d, 0x50, 0x17, 0x7b, 0x18, 0x90, 0x03, 0x52, 0xec, 0x32, 0x9c, 0x9e, 0x40, 0x60, 0x3c, 0x97, - 0x2f, 0xc5, 0x9b, 0x59, 0xb9, 0x38, 0xc5, 0x0a, 0x27, 0xfd, 0x03, 0xd2, 0x2e, 0xb9, 0x0c, 0x6f, - 0x60, 0x2b, 0x03, 0x71, 0x1e, 0x20, 0x2a, 0x39, 0x20, 0x52, 0x6a, 0x82, 0x41, 0xe1, 0xa3, 0x7f, - 0x47, 0x9a, 0x21, 0x5e, 0x47, 0x9b, 0x8a, 0x7a, 0x5c, 0x00, 0x94, 0x87, 0x39, 0xdf, 0x7e, 0xea, - 0xb2, 0x1c, 0xe3, 0xab, 0xff, 0x42, 0xda, 0x92, 0xa4, 0x72, 0x95, 0xf6, 0x73, 0x6a, 
0x97, 0x5d, - 0x7b, 0xdb, 0xa1, 0x51, 0xa6, 0xa6, 0x81, 0xef, 0xe9, 0x28, 0xfa, 0x95, 0x90, 0x16, 0x90, 0x83, - 0x10, 0x54, 0x5e, 0xee, 0x1f, 0x1a, 0xe8, 0xe0, 0xd0, 0x40, 0xff, 0x0e, 0x0d, 0xf4, 0xad, 0x63, - 0x14, 0x0e, 0x3a, 0x46, 0xe1, 0x4f, 0xc7, 0x28, 0xbc, 0xa8, 0x36, 0x28, 0xdf, 0x69, 0xd5, 0x8b, - 0x98, 0xed, 0x96, 0x22, 0xbe, 0x95, 0x18, 0xb0, 0x94, 0x00, 0x5c, 0xe9, 0x12, 0xae, 0x44, 0x88, - 0xa5, 0xb7, 0xf0, 0x99, 0xca, 0xdb, 0x1e, 0x09, 0xea, 0x13, 0xf0, 0xc5, 0x79, 0xf7, 0x7f, 0x00, - 0x00, 0x00, 0xff, 0xff, 0xf9, 0x5e, 0xe6, 0x31, 0x6d, 0x0d, 0x00, 0x00, +var fileDescriptor_f53eb28747bee8a8 = []byte{ + // 810 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x96, 0xcf, 0x4f, 0xd4, 0x4e, + 0x18, 0xc6, 0x77, 0xbe, 0xf0, 0x45, 0x2d, 0xf8, 0xab, 0x41, 0x25, 0x1c, 0x56, 0xc4, 0xc4, 0x70, + 0x61, 0x37, 0xc1, 0x83, 0x17, 0x2f, 0xbb, 0x6c, 0x82, 0x8b, 0xba, 0x90, 0x45, 0x38, 0x10, 0x63, + 0xd3, 0x9d, 0x8e, 0xcb, 0xd8, 0xa5, 0x53, 0xdb, 0x59, 0xe2, 0x9a, 0x98, 0x68, 0x34, 0xc6, 0xa3, + 0x47, 0xaf, 0x26, 0x26, 0x5e, 0xfd, 0x33, 0x38, 0x72, 0xf4, 0x64, 0x0c, 0xfb, 0x8f, 0x98, 0xbe, + 0x9d, 0xb2, 0xa5, 0x9d, 0x2e, 0x74, 0xd2, 0xdb, 0x52, 0xda, 0xe7, 0xf9, 0xcc, 0xfb, 0x3e, 0xf3, + 0xce, 0x68, 0x0f, 0xdf, 0xd2, 0x6e, 0x87, 0x10, 0xb3, 0xd7, 0xa3, 0xa6, 0x83, 0x49, 0xd5, 0xa2, + 0x3e, 0xf7, 0x68, 0xa7, 0xcf, 0x89, 0x85, 0xd9, 0xbe, 0x1b, 0x3e, 0xed, 0x11, 0xab, 0x4b, 0xbc, + 0xaa, 0x6b, 0xd3, 0x6a, 0x97, 0x38, 0xc4, 0xa7, 0x7e, 0xc5, 0xf5, 0x18, 0x67, 0x7a, 0xe5, 0xf4, + 0xd7, 0x95, 0x31, 0x5f, 0x57, 0x5c, 0x9b, 0xce, 0xaf, 0xe7, 0x74, 0x33, 0x5d, 0xd7, 0x63, 0x07, + 0xc4, 0x32, 0x30, 0xf1, 0x38, 0x7d, 0x49, 0xb1, 0xc9, 0x89, 0xf0, 0x9e, 0x6f, 0xe6, 0xd4, 0x72, + 0x3d, 0xe6, 0x32, 0xff, 0xb4, 0x96, 0x90, 0x5a, 0xcb, 0x29, 0x85, 0xf7, 0x68, 0x4f, 0xca, 0xf4, + 0xac, 0x00, 0x26, 0xc3, 0x23, 0x07, 0x0c, 0x9b, 0x9c, 0x32, 0x47, 0x71, 0xa5, 0x81, 0x80, 0x2d, + 0x2f, 0x5a, 0xde, 0x95, 0xf6, 0x1d, 0xfa, 0xba, 
0x4f, 0x24, 0x25, 0xdb, 0x50, 0xed, 0xa4, 0xc7, + 0x18, 0x97, 0x91, 0xb5, 0x14, 0x17, 0x99, 0xa5, 0xb7, 0x5d, 0x44, 0xd4, 0x8c, 0xce, 0xc0, 0xf0, + 0xfb, 0x9d, 0x57, 0x04, 0x73, 0xe5, 0x5e, 0x04, 0x1f, 0x4b, 0x53, 0xb7, 0x93, 0x37, 0x2c, 0x36, + 0x8d, 0x65, 0xc3, 0x38, 0x79, 0x3d, 0xf8, 0xc3, 0x65, 0xd4, 0x89, 0x10, 0xdd, 0xa2, 0x75, 0xa1, + 0x0c, 0xd4, 0xf7, 0xfb, 0xc4, 0x8b, 0xaa, 0x61, 0xd8, 0x64, 0x60, 0x50, 0x4b, 0x38, 0x3e, 0x2f, + 0xb8, 0xd6, 0xa7, 0xd5, 0xf3, 0x0e, 0x0d, 0x87, 0xe1, 0xcc, 0x54, 0x34, 0x15, 0xb4, 0x28, 0x36, + 0x65, 0x52, 0x6d, 0xc5, 0xc0, 0x8e, 0xc3, 0xdb, 0x2d, 0x62, 0xa9, 0x41, 0x21, 0x0f, 0xa8, 0x65, + 0x98, 0x8e, 0x65, 0xf8, 0xf6, 0x49, 0x19, 0x67, 0xbb, 0xac, 0xcb, 0xe0, 0x67, 0x35, 0xf8, 0x15, + 0x3e, 0x5d, 0xfc, 0x32, 0xab, 0xcd, 0xac, 0x85, 0x33, 0x7d, 0x8b, 0x9b, 0x9c, 0xe8, 0x9f, 0x91, + 0x36, 0x17, 0xf5, 0x67, 0x35, 0xa6, 0xfa, 0x84, 0xfa, 0x7c, 0x0e, 0x2d, 0x4c, 0x2c, 0x4d, 0xaf, + 0x34, 0x72, 0x8e, 0xfd, 0x4a, 0x4d, 0xa2, 0x57, 0x9f, 0x3c, 0xfc, 0x73, 0xbb, 0xd4, 0xce, 0xf4, + 0xd2, 0x3f, 0x22, 0xed, 0x56, 0x34, 0x1f, 0x63, 0xff, 0x04, 0x8e, 0xff, 0x80, 0x63, 0x35, 0x2f, + 0xc7, 0x66, 0x5a, 0x4e, 0x60, 0x64, 0x39, 0xe9, 0xef, 0xb4, 0x1b, 0x30, 0xed, 0x53, 0xa5, 0x98, + 0x00, 0x84, 0x5a, 0x5e, 0x84, 0xd5, 0xa4, 0x98, 0x00, 0x90, 0xbb, 0xe8, 0xdf, 0x91, 0x76, 0x47, + 0x82, 0xd6, 0x3e, 0xd9, 0xae, 0xc0, 0x32, 0x09, 0x2c, 0x4f, 0x0b, 0x28, 0xc7, 0x48, 0x58, 0x70, + 0x9d, 0xed, 0x0e, 0x8d, 0x12, 0xe1, 0x4e, 0x55, 0xe9, 0x7f, 0xb5, 0x46, 0xb5, 0xd3, 0x72, 0x51, + 0xa3, 0x32, 0x9c, 0x82, 0x46, 0x85, 0x87, 0x55, 0x32, 0x2b, 0x53, 0x6a, 0x8d, 0xda, 0x4e, 0x8a, + 0x45, 0x8d, 0x92, 0xba, 0xe8, 0x9f, 0x62, 0xdb, 0xa6, 0xcd, 0x18, 0x8f, 0xf3, 0xcd, 0x5d, 0x58, + 0x40, 0x4b, 0xd3, 0x2b, 0x8f, 0x54, 0xb7, 0x4d, 0x52, 0xaf, 0x9d, 0xe9, 0xa4, 0x7f, 0x18, 0xf5, + 0x22, 0x45, 0x71, 0x11, 0x28, 0xd6, 0x14, 0x7b, 0x91, 0x82, 0xc8, 0xf2, 0x81, 0xcc, 0xca, 0x76, + 0x75, 0x7d, 0xb0, 0x15, 0x8e, 0x77, 0x68, 0xcb, 0x25, 0xb5, 0xcc, 0xd6, 0xc6, 0x09, 
0x47, 0x99, + 0x3d, 0xd3, 0x5d, 0x64, 0x36, 0x3c, 0x9a, 0x93, 0x81, 0xd1, 0x54, 0x33, 0x9b, 0x92, 0x1b, 0x65, + 0x56, 0xea, 0xa4, 0xff, 0x40, 0xda, 0xe2, 0xa6, 0x4d, 0x47, 0xfb, 0xa9, 0x11, 0x3b, 0x7b, 0x37, + 0x83, 0xa3, 0x17, 0x80, 0xa6, 0x01, 0xa8, 0x95, 0x7b, 0x7b, 0x8f, 0x55, 0x16, 0x6c, 0xe7, 0xf0, + 0xd7, 0x87, 0x48, 0x7b, 0x30, 0xfe, 0x35, 0xbf, 0x3e, 0x68, 0xc2, 0xfd, 0x40, 0xd4, 0xf8, 0x31, + 0x19, 0x34, 0x1b, 0xc0, 0x3e, 0x03, 0xec, 0xb8, 0x58, 0x76, 0xa9, 0x9d, 0x58, 0x90, 0x2a, 0xa9, + 0xfe, 0x0b, 0x69, 0xf7, 0xc6, 0x06, 0x27, 0x78, 0xd5, 0x82, 0x45, 0x5d, 0x86, 0x45, 0xb5, 0x0b, + 0xcd, 0x2e, 0xa8, 0x8b, 0x35, 0x9c, 0x93, 0x03, 0x52, 0xec, 0x30, 0x9c, 0xdc, 0x81, 0xc0, 0x78, + 0x45, 0x2d, 0xc5, 0xad, 0xb4, 0x5c, 0x94, 0xe2, 0x0c, 0x27, 0xfd, 0x3d, 0xd2, 0x6e, 0x3a, 0x0c, + 0x37, 0xb1, 0x99, 0x82, 0xb8, 0x0a, 0x10, 0x75, 0x05, 0x88, 0x84, 0x9a, 0x60, 0xc8, 0xf0, 0xd1, + 0xbf, 0x21, 0xad, 0x2c, 0xc6, 0x51, 0x2b, 0xa3, 0x1e, 0xd7, 0x00, 0x65, 0x5d, 0x71, 0xfa, 0x65, + 0x97, 0xe5, 0x0c, 0x5f, 0xfd, 0x27, 0xd2, 0xee, 0x4a, 0x2a, 0x57, 0x1f, 0xec, 0x50, 0xab, 0xe6, + 0x58, 0x5b, 0x36, 0x0d, 0x33, 0x75, 0x1d, 0xf8, 0x36, 0x8a, 0xe8, 0x57, 0x4c, 0x5a, 0x40, 0x9e, + 0x87, 0xa0, 0xfe, 0xe2, 0xf0, 0xb8, 0x8c, 0x8e, 0x8e, 0xcb, 0xe8, 0xef, 0x71, 0x19, 0x7d, 0x1d, + 0x96, 0x4b, 0x47, 0xc3, 0x72, 0xe9, 0xf7, 0xb0, 0x5c, 0xda, 0x6d, 0x74, 0x29, 0xdf, 0xeb, 0x77, + 0x2a, 0x98, 0xed, 0x57, 0x43, 0xbe, 0x65, 0xd9, 0x15, 0x75, 0x79, 0x44, 0xb8, 0x2c, 0x2e, 0xa9, + 0x6f, 0xe0, 0x9a, 0xca, 0x07, 0x2e, 0xf1, 0x3b, 0x53, 0x70, 0xe3, 0xbc, 0xff, 0x2f, 0x00, 0x00, + 0xff, 0xff, 0x8f, 0x4b, 0xfe, 0xd1, 0x73, 0x10, 0x00, 0x00, } func (m *GenesisState) Marshal() (dAtA []byte, err error) { diff --git a/x/pki/types/grant.pb.go b/x/pki/types/grant.pb.go index 932adbb53..404c26d10 100644 --- a/x/pki/types/grant.pb.go +++ b/x/pki/types/grant.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: pki/grant.proto +// source: zigbeealliance/distributedcomplianceledger/pki/grant.proto package types import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -33,7 +33,7 @@ func (m *Grant) Reset() { *m = Grant{} } func (m *Grant) String() string { return proto.CompactTextString(m) } func (*Grant) ProtoMessage() {} func (*Grant) Descriptor() ([]byte, []int) { - return fileDescriptor_e48633ba4b90fc0a, []int{0} + return fileDescriptor_046cecf95bd45d05, []int{0} } func (m *Grant) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -87,26 +87,28 @@ func init() { proto.RegisterType((*Grant)(nil), "zigbeealliance.distributedcomplianceledger.pki.Grant") } -func init() { proto.RegisterFile("pki/grant.proto", fileDescriptor_e48633ba4b90fc0a) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/pki/grant.proto", fileDescriptor_046cecf95bd45d05) +} -var fileDescriptor_e48633ba4b90fc0a = []byte{ - // 246 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x2f, 0xc8, 0xce, 0xd4, - 0x4f, 0x2f, 0x4a, 0xcc, 0x2b, 0xd1, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xd2, 0xab, 0xca, 0x4c, - 0x4f, 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4b, 0xc9, 0x2c, 0x2e, - 0x29, 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, - 0xa4, 0xa7, 0x16, 0xe9, 0x15, 0x64, 0x67, 0x4a, 0x49, 0x26, 0xe7, 0x17, 0xe7, 0xe6, 0x17, 0xc7, - 0x83, 0x75, 0xeb, 0x43, 0x38, 0x10, 0xa3, 0x94, 0x92, 0xb9, 0x58, 0xdd, 0x41, 0x26, 0x0b, 0x19, - 0x71, 0xb1, 0x27, 0xa6, 0xa4, 0x14, 0xa5, 0x16, 0x17, 0x4b, 0x30, 0x2a, 0x30, 0x6a, 0x70, 0x3a, - 0x49, 0x5c, 0xda, 0xa2, 0x2b, 0x02, 0x55, 0xeb, 0x08, 0x91, 0x09, 0x2e, 0x29, 0xca, 0xcc, 0x4b, - 0x0f, 0x82, 0x29, 0x14, 0x12, 0xe2, 0x62, 0x29, 0xc9, 0xcc, 0x4d, 0x95, 0x60, 0x52, 0x60, 0xd4, 
- 0x60, 0x0e, 0x02, 0xb3, 0x41, 0x62, 0x99, 0x79, 0x69, 0xf9, 0x12, 0xcc, 0x20, 0x43, 0x82, 0xc0, - 0x6c, 0xa7, 0xb8, 0x13, 0x8f, 0xe4, 0x18, 0x2f, 0x3c, 0x92, 0x63, 0x7c, 0xf0, 0x48, 0x8e, 0x71, - 0xc2, 0x63, 0x39, 0x86, 0x0b, 0x8f, 0xe5, 0x18, 0x6e, 0x3c, 0x96, 0x63, 0x88, 0x72, 0x49, 0xcf, - 0x2c, 0xc9, 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x87, 0x78, 0x4a, 0x17, 0xe6, 0x2b, 0x7d, - 0x24, 0x5f, 0xe9, 0x22, 0xbc, 0xa5, 0x0b, 0xf1, 0x97, 0x7e, 0x85, 0x3e, 0x28, 0x54, 0x4a, 0x2a, - 0x0b, 0x52, 0x8b, 0x93, 0xd8, 0xc0, 0x7e, 0x31, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0xf0, 0x1d, - 0xea, 0x1b, 0x29, 0x01, 0x00, 0x00, +var fileDescriptor_046cecf95bd45d05 = []byte{ + // 248 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xb2, 0xaa, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0x17, 0x64, 0x67, 0xea, 0xa7, 0x17, 0x25, 0xe6, 0x95, 0xe8, 0x15, 0x14, 0xe5, + 0x97, 0xe4, 0x0b, 0xe9, 0xa1, 0xea, 0xd5, 0xc3, 0xa3, 0x57, 0xaf, 0x20, 0x3b, 0x53, 0x4a, 0x32, + 0x39, 0xbf, 0x38, 0x37, 0xbf, 0x38, 0x1e, 0xac, 0x5b, 0x1f, 0xc2, 0x81, 0x18, 0xa5, 0x94, 0xcc, + 0xc5, 0xea, 0x0e, 0x32, 0x59, 0xc8, 0x88, 0x8b, 0x3d, 0x31, 0x25, 0xa5, 0x28, 0xb5, 0xb8, 0x58, + 0x82, 0x51, 0x81, 0x51, 0x83, 0xd3, 0x49, 0xe2, 0xd2, 0x16, 0x5d, 0x11, 0xa8, 0x5a, 0x47, 0x88, + 0x4c, 0x70, 0x49, 0x51, 0x66, 0x5e, 0x7a, 0x10, 0x4c, 0xa1, 0x90, 0x10, 0x17, 0x4b, 0x49, 0x66, + 0x6e, 0xaa, 0x04, 0x93, 0x02, 0xa3, 0x06, 0x73, 0x10, 0x98, 0x0d, 0x12, 0xcb, 0xcc, 0x4b, 0xcb, + 0x97, 0x60, 0x06, 0x19, 0x12, 0x04, 0x66, 0x3b, 0xc5, 0x9d, 0x78, 0x24, 0xc7, 0x78, 0xe1, 0x91, + 0x1c, 0xe3, 0x83, 0x47, 0x72, 0x8c, 0x13, 0x1e, 0xcb, 0x31, 0x5c, 0x78, 0x2c, 0xc7, 0x70, 0xe3, + 0xb1, 0x1c, 0x43, 0x94, 0x4b, 0x7a, 0x66, 0x49, 0x46, 0x69, 0x92, 0x5e, 0x72, 0x7e, 0xae, 0x3e, + 
0xc4, 0x53, 0xba, 0xd8, 0x42, 0x44, 0x17, 0xe1, 0x2d, 0x5d, 0x68, 0x98, 0x54, 0x80, 0x43, 0xa5, + 0xa4, 0xb2, 0x20, 0xb5, 0x38, 0x89, 0x0d, 0xec, 0x17, 0x63, 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, + 0x48, 0x59, 0x12, 0xcb, 0x54, 0x01, 0x00, 0x00, } func (m *Grant) Marshal() (dAtA []byte, err error) { diff --git a/x/pki/types/message_add_noc_x_509_ica_cert.go b/x/pki/types/message_add_noc_x_509_ica_cert.go index f76e50ef7..26124b1a5 100644 --- a/x/pki/types/message_add_noc_x_509_ica_cert.go +++ b/x/pki/types/message_add_noc_x_509_ica_cert.go @@ -1,6 +1,7 @@ package types import ( + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" @@ -47,7 +48,7 @@ func (msg *MsgAddNocX509IcaCert) GetSignBytes() []byte { func (msg *MsgAddNocX509IcaCert) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) } err = validator.Validate(msg) diff --git a/x/pki/types/message_add_noc_x_509_ica_cert_test.go b/x/pki/types/message_add_noc_x_509_ica_cert_test.go index 81223d4e6..307578b2f 100644 --- a/x/pki/types/message_add_noc_x_509_ica_cert_test.go +++ b/x/pki/types/message_add_noc_x_509_ica_cert_test.go @@ -3,9 +3,9 @@ package types import ( "testing" + tmrand "github.com/cometbft/cometbft/libs/rand" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/sample" diff --git a/x/pki/types/message_add_noc_x_509_root_cert.go b/x/pki/types/message_add_noc_x_509_root_cert.go index 
e1fc267c5..ee6aa7180 100644 --- a/x/pki/types/message_add_noc_x_509_root_cert.go +++ b/x/pki/types/message_add_noc_x_509_root_cert.go @@ -1,6 +1,7 @@ package types import ( + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" @@ -47,7 +48,7 @@ func (msg *MsgAddNocX509RootCert) GetSignBytes() []byte { func (msg *MsgAddNocX509RootCert) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) } err = validator.Validate(msg) diff --git a/x/pki/types/message_add_noc_x_509_root_cert_test.go b/x/pki/types/message_add_noc_x_509_root_cert_test.go index 5f26b1004..e45dac756 100644 --- a/x/pki/types/message_add_noc_x_509_root_cert_test.go +++ b/x/pki/types/message_add_noc_x_509_root_cert_test.go @@ -3,9 +3,9 @@ package types import ( "testing" + tmrand "github.com/cometbft/cometbft/libs/rand" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/sample" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" diff --git a/x/pki/types/message_add_x_509_cert.go b/x/pki/types/message_add_x_509_cert.go index e088c3147..640c167c6 100644 --- a/x/pki/types/message_add_x_509_cert.go +++ b/x/pki/types/message_add_x_509_cert.go @@ -1,6 +1,7 @@ package types import ( + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" @@ 
-46,7 +47,7 @@ func (msg *MsgAddX509Cert) GetSignBytes() []byte { func (msg *MsgAddX509Cert) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) } err = validator.Validate(msg) diff --git a/x/pki/types/message_add_x_509_cert_test.go b/x/pki/types/message_add_x_509_cert_test.go index 304fa3788..3112f6e08 100644 --- a/x/pki/types/message_add_x_509_cert_test.go +++ b/x/pki/types/message_add_x_509_cert_test.go @@ -3,9 +3,9 @@ package types import ( "testing" + tmrand "github.com/cometbft/cometbft/libs/rand" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/sample" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" diff --git a/x/pki/types/message_approve_add_x_509_root_cert.go b/x/pki/types/message_approve_add_x_509_root_cert.go index a2244ff96..18328bbcd 100644 --- a/x/pki/types/message_approve_add_x_509_root_cert.go +++ b/x/pki/types/message_approve_add_x_509_root_cert.go @@ -3,6 +3,7 @@ package types import ( "time" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" @@ -49,7 +50,7 @@ func (msg *MsgApproveAddX509RootCert) GetSignBytes() []byte { func (msg *MsgApproveAddX509RootCert) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address 
(%s)", err) } err = validator.Validate(msg) diff --git a/x/pki/types/message_approve_add_x_509_root_cert_test.go b/x/pki/types/message_approve_add_x_509_root_cert_test.go index 3c8bf4653..18cdff2fc 100644 --- a/x/pki/types/message_approve_add_x_509_root_cert_test.go +++ b/x/pki/types/message_approve_add_x_509_root_cert_test.go @@ -3,9 +3,9 @@ package types import ( "testing" + tmrand "github.com/cometbft/cometbft/libs/rand" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/sample" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" diff --git a/x/pki/types/message_approve_revoke_x_509_root_cert.go b/x/pki/types/message_approve_revoke_x_509_root_cert.go index 569aa2c8f..7496d35ee 100644 --- a/x/pki/types/message_approve_revoke_x_509_root_cert.go +++ b/x/pki/types/message_approve_revoke_x_509_root_cert.go @@ -3,6 +3,7 @@ package types import ( "time" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" @@ -50,7 +51,7 @@ func (msg *MsgApproveRevokeX509RootCert) GetSignBytes() []byte { func (msg *MsgApproveRevokeX509RootCert) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) } err = validator.Validate(msg) diff --git a/x/pki/types/message_approve_revoke_x_509_root_cert_test.go b/x/pki/types/message_approve_revoke_x_509_root_cert_test.go index 8f2e67680..d6a5cbee5 100644 --- a/x/pki/types/message_approve_revoke_x_509_root_cert_test.go 
+++ b/x/pki/types/message_approve_revoke_x_509_root_cert_test.go @@ -3,9 +3,9 @@ package types import ( "testing" + tmrand "github.com/cometbft/cometbft/libs/rand" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/sample" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" diff --git a/x/pki/types/message_assign_vid.go b/x/pki/types/message_assign_vid.go index 2b641e5f0..6f36aedc8 100644 --- a/x/pki/types/message_assign_vid.go +++ b/x/pki/types/message_assign_vid.go @@ -1,6 +1,7 @@ package types import ( + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" @@ -46,7 +47,7 @@ func (msg *MsgAssignVid) GetSignBytes() []byte { func (msg *MsgAssignVid) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) } err = validator.Validate(msg) diff --git a/x/pki/types/message_assign_vid_test.go b/x/pki/types/message_assign_vid_test.go index 500dfc5d5..b9d0b85e9 100644 --- a/x/pki/types/message_assign_vid_test.go +++ b/x/pki/types/message_assign_vid_test.go @@ -3,9 +3,9 @@ package types import ( "testing" + tmrand "github.com/cometbft/cometbft/libs/rand" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" 
"github.com/zigbee-alliance/distributed-compliance-ledger/testutil/sample" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" diff --git a/x/pki/types/message_propose_add_x_509_root_cert.go b/x/pki/types/message_propose_add_x_509_root_cert.go index 4aa3b6da2..c1d57aa6c 100644 --- a/x/pki/types/message_propose_add_x_509_root_cert.go +++ b/x/pki/types/message_propose_add_x_509_root_cert.go @@ -4,6 +4,7 @@ import ( fmt "fmt" "time" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" @@ -53,7 +54,7 @@ func (msg *MsgProposeAddX509RootCert) GetSignBytes() []byte { func (msg *MsgProposeAddX509RootCert) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) } err = validator.Validate(msg) diff --git a/x/pki/types/message_propose_add_x_509_root_cert_test.go b/x/pki/types/message_propose_add_x_509_root_cert_test.go index 7af3e889a..c48c33066 100644 --- a/x/pki/types/message_propose_add_x_509_root_cert_test.go +++ b/x/pki/types/message_propose_add_x_509_root_cert_test.go @@ -3,9 +3,9 @@ package types import ( "testing" + tmrand "github.com/cometbft/cometbft/libs/rand" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/sample" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" diff --git a/x/pki/types/message_propose_revoke_x_509_root_cert.go b/x/pki/types/message_propose_revoke_x_509_root_cert.go index 
ba664ed79..061ef3c2c 100644 --- a/x/pki/types/message_propose_revoke_x_509_root_cert.go +++ b/x/pki/types/message_propose_revoke_x_509_root_cert.go @@ -3,6 +3,7 @@ package types import ( "time" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" @@ -60,7 +61,7 @@ func (msg *MsgProposeRevokeX509RootCert) GetSignBytes() []byte { func (msg *MsgProposeRevokeX509RootCert) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) } err = validator.Validate(msg) diff --git a/x/pki/types/message_propose_revoke_x_509_root_cert_test.go b/x/pki/types/message_propose_revoke_x_509_root_cert_test.go index b0f2825f4..6144c781f 100644 --- a/x/pki/types/message_propose_revoke_x_509_root_cert_test.go +++ b/x/pki/types/message_propose_revoke_x_509_root_cert_test.go @@ -3,9 +3,9 @@ package types import ( "testing" + tmrand "github.com/cometbft/cometbft/libs/rand" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/sample" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" diff --git a/x/pki/types/message_reject_add_x_509_root_cert.go b/x/pki/types/message_reject_add_x_509_root_cert.go index 109fa98a1..5ca56c84e 100644 --- a/x/pki/types/message_reject_add_x_509_root_cert.go +++ b/x/pki/types/message_reject_add_x_509_root_cert.go @@ -3,6 +3,7 @@ package types import ( "time" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors 
"github.com/cosmos/cosmos-sdk/types/errors" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" @@ -50,7 +51,7 @@ func (msg *MsgRejectAddX509RootCert) GetSignBytes() []byte { func (msg *MsgRejectAddX509RootCert) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) } err = validator.Validate(msg) diff --git a/x/pki/types/message_reject_add_x_509_root_cert_test.go b/x/pki/types/message_reject_add_x_509_root_cert_test.go index f360748c0..427b12e21 100644 --- a/x/pki/types/message_reject_add_x_509_root_cert_test.go +++ b/x/pki/types/message_reject_add_x_509_root_cert_test.go @@ -3,9 +3,9 @@ package types import ( "testing" + tmrand "github.com/cometbft/cometbft/libs/rand" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/sample" diff --git a/x/pki/types/message_remove_x_509_cert.go b/x/pki/types/message_remove_x_509_cert.go index ead83a44d..ecfb28eda 100644 --- a/x/pki/types/message_remove_x_509_cert.go +++ b/x/pki/types/message_remove_x_509_cert.go @@ -1,6 +1,7 @@ package types import ( + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" @@ -46,7 +47,7 @@ func (msg *MsgRemoveX509Cert) GetSignBytes() []byte { func (msg *MsgRemoveX509Cert) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) + return 
errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) } err = validator.Validate(msg) diff --git a/x/pki/types/message_remove_x_509_cert_test.go b/x/pki/types/message_remove_x_509_cert_test.go index 8b3fc65fd..7c9835f1a 100644 --- a/x/pki/types/message_remove_x_509_cert_test.go +++ b/x/pki/types/message_remove_x_509_cert_test.go @@ -3,9 +3,9 @@ package types import ( "testing" + tmrand "github.com/cometbft/cometbft/libs/rand" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/sample" diff --git a/x/pki/types/message_revoke_noc_x_509_ica_cert.go b/x/pki/types/message_revoke_noc_x_509_ica_cert.go index bd08179c1..3f2c585cb 100644 --- a/x/pki/types/message_revoke_noc_x_509_ica_cert.go +++ b/x/pki/types/message_revoke_noc_x_509_ica_cert.go @@ -3,6 +3,7 @@ package types import ( "time" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" @@ -52,7 +53,7 @@ func (msg *MsgRevokeNocX509IcaCert) GetSignBytes() []byte { func (msg *MsgRevokeNocX509IcaCert) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) } err = validator.Validate(msg) diff --git a/x/pki/types/message_revoke_noc_x_509_ica_cert_test.go b/x/pki/types/message_revoke_noc_x_509_ica_cert_test.go index 2b99a279a..d675c05b0 100644 --- a/x/pki/types/message_revoke_noc_x_509_ica_cert_test.go +++ b/x/pki/types/message_revoke_noc_x_509_ica_cert_test.go @@ -3,9 +3,9 @@ package types import 
( "testing" + tmrand "github.com/cometbft/cometbft/libs/rand" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/sample" diff --git a/x/pki/types/message_revoke_noc_x_509_root_cert.go b/x/pki/types/message_revoke_noc_x_509_root_cert.go index c8b0e95c3..d59b9ea47 100644 --- a/x/pki/types/message_revoke_noc_x_509_root_cert.go +++ b/x/pki/types/message_revoke_noc_x_509_root_cert.go @@ -3,6 +3,7 @@ package types import ( "time" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" @@ -52,7 +53,7 @@ func (msg *MsgRevokeNocX509RootCert) GetSignBytes() []byte { func (msg *MsgRevokeNocX509RootCert) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) } err = validator.Validate(msg) diff --git a/x/pki/types/message_revoke_noc_x_509_root_cert_test.go b/x/pki/types/message_revoke_noc_x_509_root_cert_test.go index fb15f4cfd..0f6033f9a 100644 --- a/x/pki/types/message_revoke_noc_x_509_root_cert_test.go +++ b/x/pki/types/message_revoke_noc_x_509_root_cert_test.go @@ -3,9 +3,9 @@ package types import ( "testing" + tmrand "github.com/cometbft/cometbft/libs/rand" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" 
"github.com/zigbee-alliance/distributed-compliance-ledger/testutil/sample" diff --git a/x/pki/types/message_revoke_x_509_cert.go b/x/pki/types/message_revoke_x_509_cert.go index 46e43cba3..b5438018c 100644 --- a/x/pki/types/message_revoke_x_509_cert.go +++ b/x/pki/types/message_revoke_x_509_cert.go @@ -3,6 +3,7 @@ package types import ( "time" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" pkitypes "github.com/zigbee-alliance/distributed-compliance-ledger/types/pki" @@ -60,7 +61,7 @@ func (msg *MsgRevokeX509Cert) GetSignBytes() []byte { func (msg *MsgRevokeX509Cert) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Signer) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) } err = validator.Validate(msg) diff --git a/x/pki/types/message_revoke_x_509_cert_test.go b/x/pki/types/message_revoke_x_509_cert_test.go index 769d4354f..e61614f31 100644 --- a/x/pki/types/message_revoke_x_509_cert_test.go +++ b/x/pki/types/message_revoke_x_509_cert_test.go @@ -3,9 +3,9 @@ package types import ( "testing" + tmrand "github.com/cometbft/cometbft/libs/rand" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/sample" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" diff --git a/x/pki/types/noc_ica_certificates.pb.go b/x/pki/types/noc_ica_certificates.pb.go index e0e0fc5dd..4e8213715 100644 --- a/x/pki/types/noc_ica_certificates.pb.go +++ b/x/pki/types/noc_ica_certificates.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: pki/noc_ica_certificates.proto +// source: zigbeealliance/distributedcomplianceledger/pki/noc_ica_certificates.proto package types import ( fmt "fmt" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -32,7 +32,7 @@ func (m *NocIcaCertificates) Reset() { *m = NocIcaCertificates{} } func (m *NocIcaCertificates) String() string { return proto.CompactTextString(m) } func (*NocIcaCertificates) ProtoMessage() {} func (*NocIcaCertificates) Descriptor() ([]byte, []int) { - return fileDescriptor_e75097c165df3607, []int{0} + return fileDescriptor_59301d987fd5414e, []int{0} } func (m *NocIcaCertificates) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -79,28 +79,30 @@ func init() { proto.RegisterType((*NocIcaCertificates)(nil), "zigbeealliance.distributedcomplianceledger.pki.NocIcaCertificates") } -func init() { proto.RegisterFile("pki/noc_ica_certificates.proto", fileDescriptor_e75097c165df3607) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/pki/noc_ica_certificates.proto", fileDescriptor_59301d987fd5414e) +} -var fileDescriptor_e75097c165df3607 = []byte{ - // 279 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x2b, 0xc8, 0xce, 0xd4, - 0xcf, 0xcb, 0x4f, 0x8e, 0xcf, 0x4c, 0x4e, 0x8c, 0x4f, 0x4e, 0x2d, 0x2a, 0xc9, 0x4c, 0xcb, 0x4c, - 0x4e, 0x2c, 0x49, 0x2d, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xd2, 0xab, 0xca, 0x4c, 0x4f, - 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4b, 0xc9, 0x2c, 0x2e, 0x29, +var fileDescriptor_59301d987fd5414e = []byte{ + // 282 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xf2, 0xac, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 
0x2c, 0x2e, 0x29, 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, - 0xa7, 0x16, 0xe9, 0x15, 0x64, 0x67, 0x4a, 0x89, 0xa4, 0xe7, 0xa7, 0xe7, 0x83, 0xb5, 0xea, 0x83, - 0x58, 0x10, 0x53, 0xa4, 0x44, 0x41, 0xb6, 0x20, 0x99, 0x0e, 0x11, 0x56, 0x9a, 0xc9, 0xc8, 0x25, - 0xe4, 0x97, 0x9f, 0xec, 0x99, 0x9c, 0xe8, 0x8c, 0x64, 0xb3, 0x90, 0x01, 0x17, 0x73, 0x59, 0x66, - 0x8a, 0x04, 0xa3, 0x02, 0xa3, 0x06, 0xab, 0x93, 0xdc, 0xa7, 0x7b, 0xf2, 0x52, 0x65, 0x89, 0x39, - 0x99, 0x29, 0x89, 0x25, 0xa9, 0x56, 0x4a, 0xe9, 0x25, 0xa9, 0xb6, 0x86, 0x3a, 0x39, 0x25, 0xa9, - 0xb6, 0x66, 0xa6, 0xa6, 0xc6, 0xa6, 0x4a, 0x41, 0x20, 0xa5, 0x42, 0x81, 0x5c, 0xac, 0x20, 0xd3, - 0x8b, 0x25, 0x98, 0x14, 0x98, 0x35, 0xb8, 0x8d, 0xac, 0x49, 0x74, 0xb5, 0x1e, 0x92, 0xf5, 0x41, - 0x10, 0x93, 0x9c, 0xe2, 0x4e, 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, 0x8e, 0xf1, 0xc1, 0x23, 0x39, - 0xc6, 0x09, 0x8f, 0xe5, 0x18, 0x2e, 0x3c, 0x96, 0x63, 0xb8, 0xf1, 0x58, 0x8e, 0x21, 0xca, 0x25, - 0x3d, 0xb3, 0x24, 0xa3, 0x34, 0x49, 0x2f, 0x39, 0x3f, 0x57, 0x1f, 0x62, 0x8f, 0x2e, 0xcc, 0x22, - 0x7d, 0x24, 0x8b, 0x74, 0x11, 0x36, 0xe9, 0x42, 0xac, 0xd2, 0xaf, 0xd0, 0x07, 0x85, 0x43, 0x49, - 0x65, 0x41, 0x6a, 0x71, 0x12, 0x1b, 0x38, 0x08, 0x8c, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0x6a, - 0x4c, 0x47, 0x3c, 0x81, 0x01, 0x00, 0x00, + 0xa7, 0x16, 0xe9, 0x17, 0x64, 0x67, 0xea, 0xe7, 0xe5, 0x27, 0xc7, 0x67, 0x26, 0x27, 0xc6, 0x27, + 0xa7, 0x16, 0x95, 0x64, 0xa6, 0x65, 0x26, 0x27, 0x96, 0xa4, 0x16, 0xeb, 0x15, 0x14, 0xe5, 0x97, + 0xe4, 0x0b, 0xe9, 0xa1, 0x1a, 0xa5, 0x87, 0xc7, 0x28, 0xbd, 0x82, 0xec, 0x4c, 0x29, 0x91, 0xf4, + 0xfc, 0xf4, 0x7c, 0xb0, 0x56, 0x7d, 0x10, 0x0b, 0x62, 0x8a, 0x94, 0x03, 0x89, 0x0e, 0x42, 0x72, + 0x08, 0xc4, 0x04, 0xa5, 0x99, 0x8c, 0x5c, 0x42, 0x7e, 0xf9, 0xc9, 0x9e, 0xc9, 0x89, 0xce, 0x48, + 0x8e, 0x14, 0x32, 0xe0, 0x62, 0x2e, 0xcb, 0x4c, 0x91, 0x60, 0x54, 0x60, 0xd4, 0x60, 0x75, 0x92, + 0xfb, 0x74, 0x4f, 0x5e, 0xaa, 0x2c, 0x31, 0x27, 0x33, 0x25, 0xb1, 0x24, 
0xd5, 0x4a, 0x29, 0xbd, + 0x24, 0xd5, 0xd6, 0x50, 0x27, 0xa7, 0x24, 0xd5, 0xd6, 0xcc, 0xd4, 0xd4, 0xd8, 0x54, 0x29, 0x08, + 0xa4, 0x54, 0x28, 0x90, 0x8b, 0x15, 0x64, 0x7a, 0xb1, 0x04, 0x93, 0x02, 0xb3, 0x06, 0xb7, 0x91, + 0x35, 0x89, 0x1e, 0xd4, 0x43, 0xb2, 0x3e, 0x08, 0x62, 0x92, 0x53, 0xdc, 0x89, 0x47, 0x72, 0x8c, + 0x17, 0x1e, 0xc9, 0x31, 0x3e, 0x78, 0x24, 0xc7, 0x38, 0xe1, 0xb1, 0x1c, 0xc3, 0x85, 0xc7, 0x72, + 0x0c, 0x37, 0x1e, 0xcb, 0x31, 0x44, 0xb9, 0xa4, 0x67, 0x96, 0x64, 0x94, 0x26, 0xe9, 0x25, 0xe7, + 0xe7, 0xea, 0x43, 0xec, 0xd1, 0xc5, 0x16, 0x06, 0xba, 0x08, 0x9b, 0x74, 0xa1, 0xa1, 0x50, 0x01, + 0x0e, 0x87, 0x92, 0xca, 0x82, 0xd4, 0xe2, 0x24, 0x36, 0x70, 0x10, 0x18, 0x03, 0x02, 0x00, 0x00, + 0xff, 0xff, 0xbf, 0x0d, 0x78, 0x1d, 0xd7, 0x01, 0x00, 0x00, } func (m *NocIcaCertificates) Marshal() (dAtA []byte, err error) { diff --git a/x/pki/types/noc_root_certificates.pb.go b/x/pki/types/noc_root_certificates.pb.go index e937b4c7d..671133d10 100644 --- a/x/pki/types/noc_root_certificates.pb.go +++ b/x/pki/types/noc_root_certificates.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: pki/noc_root_certificates.proto +// source: zigbeealliance/distributedcomplianceledger/pki/noc_root_certificates.proto package types import ( fmt "fmt" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -32,7 +32,7 @@ func (m *NocRootCertificates) Reset() { *m = NocRootCertificates{} } func (m *NocRootCertificates) String() string { return proto.CompactTextString(m) } func (*NocRootCertificates) ProtoMessage() {} func (*NocRootCertificates) Descriptor() ([]byte, []int) { - return fileDescriptor_e99953abf8ab8434, []int{0} + return fileDescriptor_c641c0880cd46de8, []int{0} } func (m *NocRootCertificates) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -79,28 +79,30 @@ func init() { proto.RegisterType((*NocRootCertificates)(nil), "zigbeealliance.distributedcomplianceledger.pki.NocRootCertificates") } -func init() { proto.RegisterFile("pki/noc_root_certificates.proto", fileDescriptor_e99953abf8ab8434) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/pki/noc_root_certificates.proto", fileDescriptor_c641c0880cd46de8) +} -var fileDescriptor_e99953abf8ab8434 = []byte{ - // 281 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x2f, 0xc8, 0xce, 0xd4, - 0xcf, 0xcb, 0x4f, 0x8e, 0x2f, 0xca, 0xcf, 0x2f, 0x89, 0x4f, 0x4e, 0x2d, 0x2a, 0xc9, 0x4c, 0xcb, - 0x4c, 0x4e, 0x2c, 0x49, 0x2d, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xd2, 0xab, 0xca, 0x4c, - 0x4f, 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4b, 0xc9, 0x2c, 0x2e, - 0x29, 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, - 0xa4, 0xa7, 0x16, 0xe9, 0x15, 0x64, 0x67, 0x4a, 0x89, 0xa4, 0xe7, 0xa7, 0xe7, 0x83, 0xb5, 0xea, - 0x83, 0x58, 0x10, 0x53, 0xa4, 0x44, 0x41, 0xd6, 0x20, 0x99, 0x0e, 
0x11, 0x56, 0x9a, 0xc5, 0xc8, - 0x25, 0xec, 0x97, 0x9f, 0x1c, 0x94, 0x9f, 0x5f, 0xe2, 0x8c, 0x64, 0xb5, 0x90, 0x01, 0x17, 0x73, - 0x59, 0x66, 0x8a, 0x04, 0xa3, 0x02, 0xa3, 0x06, 0xab, 0x93, 0xdc, 0xa7, 0x7b, 0xf2, 0x52, 0x65, - 0x89, 0x39, 0x99, 0x29, 0x89, 0x25, 0xa9, 0x56, 0x4a, 0xe9, 0x25, 0xa9, 0xb6, 0x86, 0x3a, 0x39, - 0x25, 0xa9, 0xb6, 0x66, 0xa6, 0xa6, 0xc6, 0xa6, 0x4a, 0x41, 0x20, 0xa5, 0x42, 0x81, 0x5c, 0xac, - 0x20, 0xe3, 0x8b, 0x25, 0x98, 0x14, 0x98, 0x35, 0xb8, 0x8d, 0xac, 0x49, 0x74, 0xb6, 0x1e, 0x92, - 0xf5, 0x41, 0x10, 0x93, 0x9c, 0xe2, 0x4e, 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, 0x8e, 0xf1, 0xc1, - 0x23, 0x39, 0xc6, 0x09, 0x8f, 0xe5, 0x18, 0x2e, 0x3c, 0x96, 0x63, 0xb8, 0xf1, 0x58, 0x8e, 0x21, - 0xca, 0x25, 0x3d, 0xb3, 0x24, 0xa3, 0x34, 0x49, 0x2f, 0x39, 0x3f, 0x57, 0x1f, 0x62, 0x8f, 0x2e, - 0xcc, 0x22, 0x7d, 0x24, 0x8b, 0x74, 0x11, 0x36, 0xe9, 0x42, 0xac, 0xd2, 0xaf, 0xd0, 0x07, 0x05, - 0x44, 0x49, 0x65, 0x41, 0x6a, 0x71, 0x12, 0x1b, 0x38, 0x0c, 0x8c, 0x01, 0x01, 0x00, 0x00, 0xff, - 0xff, 0x4d, 0x71, 0x41, 0xdc, 0x83, 0x01, 0x00, 0x00, +var fileDescriptor_c641c0880cd46de8 = []byte{ + // 284 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xf2, 0xaa, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0x17, 0x64, 0x67, 0xea, 0xe7, 0xe5, 0x27, 0xc7, 0x17, 0xe5, 0xe7, 0x97, 0xc4, + 0x27, 0xa7, 0x16, 0x95, 0x64, 0xa6, 0x65, 0x26, 0x27, 0x96, 0xa4, 0x16, 0xeb, 0x15, 0x14, 0xe5, + 0x97, 0xe4, 0x0b, 0xe9, 0xa1, 0x9a, 0xa5, 0x87, 0xc7, 0x2c, 0xbd, 0x82, 0xec, 0x4c, 0x29, 0x91, + 0xf4, 0xfc, 0xf4, 0x7c, 0xb0, 0x56, 0x7d, 0x10, 0x0b, 0x62, 0x8a, 0x94, 0x03, 0x89, 0x2e, 0x42, + 0x72, 0x08, 0xc4, 0x04, 0xa5, 0x59, 0x8c, 0x5c, 0xc2, 0x7e, 0xf9, 0xc9, 0x41, 0xf9, 0xf9, 0x25, + 0xce, 0x48, 0xae, 0x14, 0x32, 0xe0, 0x62, 0x2e, 
0xcb, 0x4c, 0x91, 0x60, 0x54, 0x60, 0xd4, 0x60, + 0x75, 0x92, 0xfb, 0x74, 0x4f, 0x5e, 0xaa, 0x2c, 0x31, 0x27, 0x33, 0x25, 0xb1, 0x24, 0xd5, 0x4a, + 0x29, 0xbd, 0x24, 0xd5, 0xd6, 0x50, 0x27, 0xa7, 0x24, 0xd5, 0xd6, 0xcc, 0xd4, 0xd4, 0xd8, 0x54, + 0x29, 0x08, 0xa4, 0x54, 0x28, 0x90, 0x8b, 0x15, 0x64, 0x7c, 0xb1, 0x04, 0x93, 0x02, 0xb3, 0x06, + 0xb7, 0x91, 0x35, 0x89, 0x3e, 0xd4, 0x43, 0xb2, 0x3e, 0x08, 0x62, 0x92, 0x53, 0xdc, 0x89, 0x47, + 0x72, 0x8c, 0x17, 0x1e, 0xc9, 0x31, 0x3e, 0x78, 0x24, 0xc7, 0x38, 0xe1, 0xb1, 0x1c, 0xc3, 0x85, + 0xc7, 0x72, 0x0c, 0x37, 0x1e, 0xcb, 0x31, 0x44, 0xb9, 0xa4, 0x67, 0x96, 0x64, 0x94, 0x26, 0xe9, + 0x25, 0xe7, 0xe7, 0xea, 0x43, 0xec, 0xd1, 0xc5, 0x16, 0x08, 0xba, 0x08, 0x9b, 0x74, 0xa1, 0xc1, + 0x50, 0x01, 0x0e, 0x88, 0x92, 0xca, 0x82, 0xd4, 0xe2, 0x24, 0x36, 0x70, 0x18, 0x18, 0x03, 0x02, + 0x00, 0x00, 0xff, 0xff, 0xd7, 0xd8, 0x1d, 0x14, 0xd9, 0x01, 0x00, 0x00, } func (m *NocRootCertificates) Marshal() (dAtA []byte, err error) { diff --git a/x/pki/types/noc_root_certificates_by_vid_and_skid.pb.go b/x/pki/types/noc_root_certificates_by_vid_and_skid.pb.go index 2699d14e2..e53232df1 100644 --- a/x/pki/types/noc_root_certificates_by_vid_and_skid.pb.go +++ b/x/pki/types/noc_root_certificates_by_vid_and_skid.pb.go @@ -1,13 +1,13 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: pki/noc_root_certificates_by_vid_and_skid.proto +// source: zigbeealliance/distributedcomplianceledger/pki/noc_root_certificates_by_vid_and_skid.proto package types import ( encoding_binary "encoding/binary" fmt "fmt" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -35,7 +35,7 @@ func (m *NocRootCertificatesByVidAndSkid) Reset() { *m = NocRootCertific func (m *NocRootCertificatesByVidAndSkid) String() string { return proto.CompactTextString(m) } func (*NocRootCertificatesByVidAndSkid) ProtoMessage() {} func (*NocRootCertificatesByVidAndSkid) Descriptor() ([]byte, []int) { - return fileDescriptor_f0b75fe838c6f118, []int{0} + return fileDescriptor_f3e80b73a145a96e, []int{0} } func (m *NocRootCertificatesByVidAndSkid) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -97,33 +97,33 @@ func init() { } func init() { - proto.RegisterFile("pki/noc_root_certificates_by_vid_and_skid.proto", fileDescriptor_f0b75fe838c6f118) + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/pki/noc_root_certificates_by_vid_and_skid.proto", fileDescriptor_f3e80b73a145a96e) } -var fileDescriptor_f0b75fe838c6f118 = []byte{ - // 343 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x91, 0xc1, 0x4a, 0xeb, 0x50, - 0x10, 0x86, 0x7b, 0xd2, 0xdb, 0x0b, 0x37, 0xf7, 0x72, 0x17, 0x41, 0x21, 0x74, 0x91, 0x86, 0xac, - 0xb2, 0xb0, 0x89, 0x5a, 0xea, 0x42, 0xe9, 0xc2, 0xea, 0x46, 0x04, 0xc1, 0x08, 0x2e, 0x5c, 0x18, - 0x92, 0x73, 0xc6, 0x38, 0x26, 0xcd, 0x49, 0x93, 0x69, 0x31, 0x3e, 0x85, 0x8f, 0xe5, 0xb2, 0x4b, - 0x71, 0x21, 0xd2, 0xbe, 0x81, 0x4f, 0x20, 0x69, 0x10, 0xe3, 0xd2, 0xdd, 0xf0, 0x33, 0xf3, 0x7f, - 0xff, 0xcc, 0xa8, 0x6e, 0x16, 0xa3, 0x9b, 0x4a, 0xee, 0xe7, 0x52, 0x92, 0xcf, 0x21, 0x27, 0xbc, - 0x41, 0x1e, 0x10, 0x14, 0x7e, 0x58, 0xfa, 0x73, 
0x14, 0x7e, 0x90, 0x0a, 0xbf, 0x88, 0x51, 0x38, - 0x59, 0x2e, 0x49, 0x6a, 0xce, 0x03, 0x46, 0x21, 0x40, 0x90, 0x24, 0x18, 0xa4, 0x1c, 0x1c, 0x81, - 0x05, 0xe5, 0x18, 0xce, 0x08, 0x04, 0x97, 0x93, 0xac, 0x56, 0x13, 0x10, 0x11, 0xe4, 0x4e, 0x16, - 0x63, 0x77, 0x23, 0x92, 0x91, 0x5c, 0x8f, 0xba, 0x55, 0x55, 0xbb, 0x74, 0x37, 0x2b, 0x6c, 0x83, - 0x56, 0xcb, 0xd6, 0x0b, 0x53, 0x7b, 0x67, 0x92, 0x7b, 0x52, 0xd2, 0x51, 0x23, 0xca, 0xb8, 0xbc, - 0x44, 0x71, 0x98, 0x8a, 0x8b, 0x18, 0x85, 0xb6, 0xad, 0xb6, 0xe7, 0x28, 0x74, 0x66, 0x32, 0xbb, - 0x33, 0x36, 0xde, 0x5f, 0x7b, 0xdd, 0x79, 0x90, 0xa0, 0x08, 0x08, 0xf6, 0xad, 0x88, 0x60, 0xb4, - 0xb3, 0x95, 0x10, 0x8c, 0xf6, 0x86, 0xc3, 0xc1, 0xd0, 0xf2, 0xaa, 0x56, 0xcd, 0x52, 0xff, 0x15, - 0xb3, 0xf0, 0x0e, 0x38, 0x9d, 0x42, 0x79, 0x22, 0x74, 0xc5, 0x64, 0xf6, 0x1f, 0xef, 0x9b, 0xa6, - 0x9d, 0xab, 0x9d, 0x2a, 0x4e, 0xa1, 0xb7, 0xcd, 0xb6, 0xfd, 0x77, 0xf7, 0xe0, 0x87, 0x6b, 0x3a, - 0x8d, 0xb8, 0x5e, 0xed, 0xa4, 0xfd, 0x57, 0x15, 0x9a, 0xea, 0xbf, 0x4c, 0x66, 0x2b, 0x9e, 0x42, - 0xd3, 0xf1, 0xf5, 0xd3, 0xd2, 0x60, 0x8b, 0xa5, 0xc1, 0xde, 0x96, 0x06, 0x7b, 0x5c, 0x19, 0xad, - 0xc5, 0xca, 0x68, 0x3d, 0xaf, 0x8c, 0xd6, 0xd5, 0x71, 0x84, 0x74, 0x3b, 0x0b, 0x1d, 0x2e, 0x27, - 0x6e, 0xcd, 0xed, 0x7f, 0x82, 0xdd, 0x06, 0xb8, 0xff, 0x45, 0xee, 0xd7, 0x68, 0xf7, 0x7e, 0xfd, - 0x3f, 0x2a, 0x33, 0x28, 0xc2, 0xdf, 0xeb, 0x1b, 0x0e, 0x3e, 0x02, 0x00, 0x00, 0xff, 0xff, 0xbe, - 0x9e, 0x3c, 0xd4, 0xd3, 0x01, 0x00, 0x00, +var fileDescriptor_f3e80b73a145a96e = []byte{ + // 345 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x90, 0x31, 0x4b, 0xc3, 0x40, + 0x14, 0x80, 0x7b, 0xa9, 0x15, 0x8c, 0xe2, 0x10, 0x1c, 0x42, 0x87, 0x6b, 0xc8, 0x94, 0xc1, 0x26, + 0x6a, 0xa9, 0x83, 0x52, 0xd0, 0xea, 0x22, 0x82, 0x60, 0x04, 0x87, 0x0e, 0x86, 0xe4, 0xee, 0x8c, + 0xcf, 0xa4, 0xb9, 0x34, 0x79, 0x2d, 0xc6, 0x5f, 0xe1, 0xcf, 0x72, 0xec, 0x28, 0x0e, 0x22, 0xed, + 0x3f, 0xf0, 0x17, 0x48, 0x1b, 0xc1, 0x16, 
0x44, 0xe8, 0x76, 0x3c, 0xee, 0x7d, 0xef, 0xe3, 0x53, + 0x7b, 0xcf, 0x10, 0x06, 0x42, 0xf8, 0x71, 0x0c, 0x7e, 0xc2, 0x84, 0xc3, 0x21, 0xc7, 0x0c, 0x82, + 0x21, 0x0a, 0xce, 0x64, 0x3f, 0x2d, 0xa7, 0xb1, 0xe0, 0xa1, 0xc8, 0x9c, 0x34, 0x02, 0x27, 0x91, + 0xcc, 0xcb, 0xa4, 0x44, 0x8f, 0x89, 0x0c, 0xe1, 0x1e, 0x98, 0x8f, 0x22, 0xf7, 0x82, 0xc2, 0x1b, + 0x01, 0xf7, 0xfc, 0x84, 0x7b, 0x79, 0x04, 0xdc, 0x4e, 0x33, 0x89, 0x52, 0xb3, 0x97, 0xd9, 0xf6, + 0x3f, 0x6c, 0x3b, 0x8d, 0xa0, 0xbe, 0x13, 0xca, 0x50, 0xce, 0x57, 0x9d, 0xd9, 0xab, 0xa4, 0xd4, + 0x4f, 0x56, 0x34, 0x5c, 0x10, 0x2b, 0x09, 0xe6, 0x3b, 0x51, 0x1b, 0x57, 0x92, 0xb9, 0x52, 0xe2, + 0xd9, 0x82, 0x75, 0xb7, 0xb8, 0x05, 0x7e, 0x9a, 0xf0, 0x9b, 0x08, 0xb8, 0xb6, 0xa7, 0x56, 0x47, + 0xc0, 0x75, 0x62, 0x10, 0xab, 0xd6, 0xa5, 0x5f, 0x1f, 0x8d, 0xfa, 0xc8, 0x8f, 0x81, 0xfb, 0x28, + 0x8e, 0xcc, 0x10, 0x45, 0x67, 0x7f, 0x37, 0x46, 0xd1, 0x39, 0x6c, 0xb7, 0x5b, 0x6d, 0xd3, 0x9d, + 0x7d, 0xd5, 0x4c, 0x75, 0x2b, 0x1f, 0x06, 0x8f, 0x82, 0xe1, 0xa5, 0x28, 0x2e, 0xb8, 0xae, 0x18, + 0xc4, 0xda, 0x70, 0x97, 0x66, 0xda, 0xb5, 0x5a, 0x9b, 0xe9, 0xe4, 0x7a, 0xd5, 0xa8, 0x5a, 0x9b, + 0x07, 0xc7, 0x2b, 0x16, 0xb1, 0x17, 0x74, 0xdd, 0x92, 0xa4, 0x6d, 0xab, 0x0a, 0x0e, 0xf4, 0x35, + 0x83, 0x58, 0x8a, 0xab, 0xe0, 0xa0, 0x7b, 0xf7, 0x3a, 0xa1, 0x64, 0x3c, 0xa1, 0xe4, 0x73, 0x42, + 0xc9, 0xcb, 0x94, 0x56, 0xc6, 0x53, 0x5a, 0x79, 0x9b, 0xd2, 0x4a, 0xef, 0x3c, 0x04, 0x7c, 0x18, + 0x06, 0x36, 0x93, 0x7d, 0xa7, 0xbc, 0xdb, 0xfc, 0x2b, 0x62, 0xf3, 0xf7, 0x72, 0xf3, 0x27, 0xe3, + 0xd3, 0x3c, 0x24, 0x16, 0xa9, 0xc8, 0x83, 0xf5, 0x79, 0xc3, 0xd6, 0x77, 0x00, 0x00, 0x00, 0xff, + 0xff, 0x51, 0xc7, 0x5b, 0x3f, 0x29, 0x02, 0x00, 0x00, } func (m *NocRootCertificatesByVidAndSkid) Marshal() (dAtA []byte, err error) { diff --git a/x/pki/types/pki_revocation_distribution_point.pb.go b/x/pki/types/pki_revocation_distribution_point.pb.go index e6f413592..f158105f9 100644 --- a/x/pki/types/pki_revocation_distribution_point.pb.go +++ 
b/x/pki/types/pki_revocation_distribution_point.pb.go @@ -1,11 +1,11 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: pki/pki_revocation_distribution_point.proto +// source: zigbeealliance/distributedcomplianceledger/pki/pki_revocation_distribution_point.proto package types import ( fmt "fmt" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -42,7 +42,7 @@ func (m *PkiRevocationDistributionPoint) Reset() { *m = PkiRevocationDis func (m *PkiRevocationDistributionPoint) String() string { return proto.CompactTextString(m) } func (*PkiRevocationDistributionPoint) ProtoMessage() {} func (*PkiRevocationDistributionPoint) Descriptor() ([]byte, []int) { - return fileDescriptor_35504fa19b856908, []int{0} + return fileDescriptor_e5f43c14e14d3cdf, []int{0} } func (m *PkiRevocationDistributionPoint) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -167,37 +167,37 @@ func init() { } func init() { - proto.RegisterFile("pki/pki_revocation_distribution_point.proto", fileDescriptor_35504fa19b856908) + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/pki/pki_revocation_distribution_point.proto", fileDescriptor_e5f43c14e14d3cdf) } -var fileDescriptor_35504fa19b856908 = []byte{ - // 411 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x92, 0xcd, 0x6e, 0xd4, 0x30, - 0x14, 0x85, 0xc7, 0x4c, 0xa7, 0x3f, 0xa6, 0x83, 0x90, 0xd5, 0x85, 0x57, 0x51, 0x54, 0x21, 0x14, - 0x09, 0x4d, 0x22, 0xc1, 0x13, 0x14, 0x22, 0x24, 0x04, 0x8b, 0x51, 0x06, 0x58, 0xb0, 0xa0, 0x72, - 0x92, 0x4b, 0x7a, 0x99, 0x4c, 0x6c, 0xd9, 0x4e, 0xc5, 0xf4, 0x29, 0x78, 0x2c, 0x96, 0x5d, 0x76, - 0x89, 0x66, 0x5e, 0x04, 0xd9, 0x56, 0xe7, 0x87, 0x76, 0x77, 0xcf, 0xb9, 0x47, 0x9f, 0xe4, 0xeb, - 0x43, 0x5f, 0xa9, 0x39, 0x66, 0x6a, 0x8e, 0x97, 0x1a, 0xae, 0x65, 0x25, 0x2c, 0xca, 0xee, 0xb2, - 0x46, 0x63, 0x35, 0x96, 0xbd, 0x17, 0x4a, 0x62, 0x67, 0x53, 0xa5, 
0xa5, 0x95, 0x2c, 0xbd, 0xc1, - 0xa6, 0x04, 0x10, 0x6d, 0x8b, 0xa2, 0xab, 0x20, 0xdd, 0x04, 0xa1, 0xae, 0xe4, 0x42, 0x05, 0xb7, - 0x85, 0xba, 0x01, 0x9d, 0xaa, 0x39, 0x9e, 0xdf, 0x0d, 0x69, 0x34, 0x9d, 0x63, 0xb1, 0x41, 0xe7, - 0x3b, 0xe4, 0xa9, 0x03, 0xb3, 0xe7, 0x74, 0x78, 0x8d, 0x35, 0x27, 0x31, 0x49, 0x46, 0x85, 0x1b, - 0xd9, 0x19, 0x1d, 0xb5, 0xa2, 0x84, 0x96, 0x3f, 0x89, 0x49, 0x72, 0x52, 0x04, 0xc1, 0x52, 0xca, - 0xd0, 0x98, 0x1e, 0xf4, 0xac, 0x2f, 0x7f, 0x42, 0x65, 0x3f, 0xc2, 0xf2, 0x43, 0xce, 0x87, 0x3e, - 0xf2, 0xc8, 0xc6, 0x71, 0x15, 0xd6, 0xfc, 0x20, 0x70, 0x55, 0xe0, 0xa2, 0x99, 0x5e, 0x5c, 0xf0, - 0x51, 0x4c, 0x92, 0xe3, 0x22, 0x08, 0xf6, 0x9a, 0x9e, 0x55, 0xba, 0x9d, 0x61, 0xd3, 0x81, 0x7e, - 0x07, 0xda, 0xe2, 0x0f, 0xac, 0x84, 0x05, 0x7e, 0xe8, 0xc9, 0x8f, 0xee, 0x18, 0xa7, 0x47, 0xb5, - 0xb0, 0xe2, 0x4b, 0xf1, 0x89, 0x1f, 0xf9, 0xd8, 0xbd, 0x64, 0xe7, 0xf4, 0xd4, 0x8d, 0xef, 0xb1, - 0x85, 0x19, 0xde, 0x00, 0x3f, 0x8e, 0x49, 0x72, 0x50, 0xec, 0x79, 0x2c, 0xa2, 0xd4, 0xe9, 0x1c, - 0x1b, 0x30, 0x96, 0x9f, 0x78, 0xc0, 0x8e, 0xc3, 0x5e, 0xd2, 0x67, 0x5b, 0xf5, 0x79, 0xa9, 0x80, - 0xd3, 0x98, 0x24, 0xe3, 0xe2, 0x3f, 0xd7, 0xe5, 0xb6, 0x7f, 0xe6, 0x73, 0x4f, 0x43, 0x6e, 0xdf, - 0x65, 0x2f, 0xe8, 0xd8, 0x54, 0x57, 0xb0, 0x10, 0x5f, 0x41, 0x1b, 0x94, 0x1d, 0x3f, 0xf5, 0xb1, - 0x7d, 0xd3, 0xdd, 0x77, 0xf3, 0xd6, 0x1c, 0x5a, 0x68, 0x84, 0x95, 0x9a, 0x8f, 0xc3, 0x7d, 0x1f, - 0x6e, 0xde, 0x7e, 0xff, 0xb3, 0x8a, 0xc8, 0xed, 0x2a, 0x22, 0x7f, 0x57, 0x11, 0xf9, 0xbd, 0x8e, - 0x06, 0xb7, 0xeb, 0x68, 0x70, 0xb7, 0x8e, 0x06, 0xdf, 0xf2, 0x06, 0xed, 0x55, 0x5f, 0xa6, 0x95, - 0x5c, 0x64, 0xa1, 0x2f, 0x93, 0xfb, 0xc2, 0x64, 0x3b, 0x85, 0x99, 0x6c, 0x1b, 0x33, 0x09, 0x95, - 0xc9, 0x7e, 0xb9, 0x22, 0x66, 0x76, 0xa9, 0xc0, 0x94, 0x87, 0xbe, 0x71, 0x6f, 0xfe, 0x05, 0x00, - 0x00, 0xff, 0xff, 0x13, 0xa7, 0x00, 0x1d, 0xa0, 0x02, 0x00, 0x00, +var fileDescriptor_e5f43c14e14d3cdf = []byte{ + // 413 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xcd, 0x6e, 0x13, 0x31, + 0x14, 0x85, 0x63, 0xd2, 0xf4, 0xc7, 0x34, 0x08, 0x59, 0x5d, 0x78, 0x35, 0x1a, 0x55, 0x08, 0xcd, + 0x26, 0x13, 0x09, 0x9e, 0xa0, 0x30, 0x42, 0x42, 0xb0, 0x88, 0x26, 0xd0, 0x05, 0x0b, 0x2a, 0xcf, + 0xcc, 0x65, 0x7a, 0x89, 0x33, 0xb6, 0x6c, 0xa7, 0x22, 0x7d, 0x0a, 0x1e, 0x8b, 0x65, 0x97, 0x5d, + 0xa2, 0xe4, 0x45, 0x90, 0x6d, 0x48, 0x1a, 0x88, 0x58, 0x8c, 0x74, 0xcf, 0xb9, 0x47, 0x9f, 0x34, + 0xd7, 0x87, 0x5e, 0xde, 0x62, 0x5b, 0x01, 0x08, 0x29, 0x51, 0x74, 0x35, 0x8c, 0x1b, 0xb4, 0xce, + 0x60, 0xb5, 0x70, 0xd0, 0xd4, 0x6a, 0xae, 0xa3, 0x2b, 0xa1, 0x69, 0xc1, 0x8c, 0xf5, 0x0c, 0xfd, + 0x77, 0x65, 0xe0, 0x46, 0xd5, 0xc2, 0xa1, 0xea, 0xae, 0x36, 0x71, 0x2f, 0xb4, 0xc2, 0xce, 0xe5, + 0xda, 0x28, 0xa7, 0x58, 0xbe, 0xcb, 0xcd, 0xff, 0xc3, 0xcd, 0xf5, 0x0c, 0xcf, 0xef, 0xfb, 0x34, + 0x99, 0xcc, 0xb0, 0xdc, 0xa0, 0x8b, 0x07, 0xe4, 0x89, 0x07, 0xb3, 0xa7, 0xb4, 0x7f, 0x83, 0x0d, + 0x27, 0x29, 0xc9, 0x06, 0xa5, 0x1f, 0xd9, 0x19, 0x1d, 0x48, 0x51, 0x81, 0xe4, 0x8f, 0x52, 0x92, + 0x9d, 0x94, 0x51, 0xb0, 0x9c, 0x32, 0xb4, 0x76, 0x01, 0x66, 0xba, 0xa8, 0xbe, 0x42, 0xed, 0xde, + 0xc1, 0xf2, 0x6d, 0xc1, 0xfb, 0x21, 0xb2, 0x67, 0xe3, 0xb9, 0x1a, 0x1b, 0x7e, 0x10, 0xb9, 0x3a, + 0x72, 0xd1, 0x4e, 0x2e, 0x2e, 0xf8, 0x20, 0x25, 0xd9, 0x71, 0x19, 0x05, 0x7b, 0x41, 0xcf, 0x6a, + 0x23, 0xa7, 0xd8, 0x76, 0x60, 0x5e, 0x83, 0x71, 0xf8, 0x05, 0x6b, 0xe1, 0x80, 0x1f, 0x06, 0xf2, + 0xde, 0x1d, 0xe3, 0xf4, 0xa8, 0x11, 0x4e, 0x7c, 0x2c, 0xdf, 0xf3, 0xa3, 0x10, 0xfb, 0x23, 0xd9, + 0x39, 0x3d, 0xf5, 0xe3, 0x1b, 0x94, 0x30, 0xc5, 0x5b, 0xe0, 0xc7, 0x29, 0xc9, 0x0e, 0xca, 0x1d, + 0x8f, 0x25, 0x94, 0x7a, 0x5d, 0x60, 0x0b, 0xd6, 0xf1, 0x93, 0x00, 0x78, 0xe0, 0xb0, 0xe7, 0xf4, + 0xc9, 0x56, 0x7d, 0x58, 0x6a, 0xe0, 0x34, 0x25, 0xd9, 0xb0, 0xfc, 0xcb, 0xf5, 0xb9, 0xed, 0x9b, + 0x85, 0xdc, 0xe3, 0x98, 0xdb, 0x75, 0xd9, 0x33, 0x3a, 0xb4, 0xf5, 0x35, 0xcc, 0xc5, 0x25, 0x18, + 0x8b, 0xaa, 0xe3, 0xa7, 0x21, 0xb6, 0x6b, 0xfa, 0xfb, 0x6e, 0xfe, 0xb5, 
0x00, 0x09, 0xad, 0x70, + 0xca, 0xf0, 0x61, 0xbc, 0xef, 0xbf, 0x9b, 0x57, 0x9f, 0x7f, 0xac, 0x12, 0x72, 0xb7, 0x4a, 0xc8, + 0xcf, 0x55, 0x42, 0xbe, 0xaf, 0x93, 0xde, 0xdd, 0x3a, 0xe9, 0xdd, 0xaf, 0x93, 0xde, 0xa7, 0xa2, + 0x45, 0x77, 0xbd, 0xa8, 0xf2, 0x5a, 0xcd, 0xc7, 0xb1, 0x2f, 0xa3, 0x7d, 0x45, 0x1c, 0x6d, 0x1b, + 0x33, 0xfa, 0x5d, 0xc5, 0x6f, 0xa1, 0x8c, 0x6e, 0xa9, 0xc1, 0x56, 0x87, 0xa1, 0x71, 0x2f, 0x7f, + 0x05, 0x00, 0x00, 0xff, 0xff, 0x33, 0xf4, 0x8f, 0x1a, 0xcb, 0x02, 0x00, 0x00, } func (m *PkiRevocationDistributionPoint) Marshal() (dAtA []byte, err error) { diff --git a/x/pki/types/pki_revocation_distribution_points_by_issuer_subject_key_id.pb.go b/x/pki/types/pki_revocation_distribution_points_by_issuer_subject_key_id.pb.go index 53bff1797..af7deff1c 100644 --- a/x/pki/types/pki_revocation_distribution_points_by_issuer_subject_key_id.pb.go +++ b/x/pki/types/pki_revocation_distribution_points_by_issuer_subject_key_id.pb.go @@ -1,11 +1,11 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: pki/pki_revocation_distribution_points_by_issuer_subject_key_id.proto +// source: zigbeealliance/distributedcomplianceledger/pki/pki_revocation_distribution_points_by_issuer_subject_key_id.proto package types import ( fmt "fmt" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -35,7 +35,7 @@ func (m *PkiRevocationDistributionPointsByIssuerSubjectKeyID) String() string { } func (*PkiRevocationDistributionPointsByIssuerSubjectKeyID) ProtoMessage() {} func (*PkiRevocationDistributionPointsByIssuerSubjectKeyID) Descriptor() ([]byte, []int) { - return fileDescriptor_304eb676640a574a, []int{0} + return fileDescriptor_0b1940ba04139c1b, []int{0} } func (m *PkiRevocationDistributionPointsByIssuerSubjectKeyID) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -83,29 +83,29 @@ func init() { } func init() { - proto.RegisterFile("pki/pki_revocation_distribution_points_by_issuer_subject_key_id.proto", fileDescriptor_304eb676640a574a) + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/pki/pki_revocation_distribution_points_by_issuer_subject_key_id.proto", fileDescriptor_0b1940ba04139c1b) } -var fileDescriptor_304eb676640a574a = []byte{ - // 276 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x90, 0x41, 0x4b, 0xc3, 0x30, - 0x14, 0xc7, 0x1b, 0x85, 0x81, 0xf5, 0xd6, 0xd3, 0xf0, 0x10, 0x86, 0xa7, 0x81, 0x34, 0x05, 0xf7, - 0x0d, 0x46, 0x3d, 0x0c, 0x41, 0x46, 0xbd, 0x79, 0x30, 0x34, 0xed, 0xb3, 0x3e, 0xdb, 0x35, 0xa1, - 0x49, 0xc5, 0xf8, 0x29, 0xfc, 0x58, 0x5e, 0x84, 0x1d, 0x3d, 0x4a, 0xfb, 0x45, 0xc4, 0x46, 0xe7, - 0x40, 0x11, 0x77, 0xcc, 0xcb, 0xe3, 0xff, 0xfb, 0xbd, 0xbf, 0x7f, 0xa6, 0x4a, 0x8c, 0x54, 0x89, - 0xbc, 0x81, 0x7b, 0x99, 0xa5, 0x06, 0x65, 0xcd, 0x73, 0xd4, 0xa6, 0x41, 0xd1, 0x0e, 0x0f, 0x25, - 0xb1, 0x36, 0x9a, 0x0b, 0xcb, 0x51, 0xeb, 0x16, 0x1a, 0xae, 0x5b, 0x71, 0x07, 0x99, 0xe1, 0x25, - 0x58, 0x8e, 
0x39, 0x53, 0x8d, 0x34, 0x32, 0x60, 0x8f, 0x58, 0x08, 0x80, 0xb4, 0xaa, 0x30, 0xad, - 0x33, 0x60, 0x9b, 0x08, 0xc8, 0x33, 0xb9, 0x52, 0x6e, 0x5a, 0x41, 0x5e, 0x40, 0xc3, 0x54, 0x89, - 0x47, 0x27, 0xff, 0xc2, 0xba, 0xf0, 0xe3, 0x17, 0xe2, 0xcf, 0x96, 0x25, 0x26, 0x9b, 0xd5, 0x78, - 0x6b, 0x73, 0x39, 0xf8, 0xcd, 0xed, 0x62, 0xb0, 0xbb, 0x74, 0x72, 0xe7, 0x60, 0x17, 0x71, 0xc0, - 0xfc, 0x00, 0x7f, 0x4c, 0xc7, 0x64, 0x42, 0xa6, 0x07, 0xc9, 0x2f, 0x3f, 0xc1, 0x8d, 0x3f, 0x72, - 0xd7, 0x8e, 0xf7, 0x26, 0xfb, 0xd3, 0xc3, 0xd3, 0x8b, 0x1d, 0xaf, 0x62, 0x7f, 0x4b, 0x26, 0x9f, - 0xe9, 0xf3, 0xeb, 0xe7, 0x8e, 0x92, 0x75, 0x47, 0xc9, 0x5b, 0x47, 0xc9, 0x53, 0x4f, 0xbd, 0x75, - 0x4f, 0xbd, 0xd7, 0x9e, 0x7a, 0x57, 0x71, 0x81, 0xe6, 0xb6, 0x15, 0x2c, 0x93, 0xab, 0xc8, 0xb1, - 0xc3, 0x2f, 0x78, 0xb4, 0x05, 0x0f, 0xbf, 0xe9, 0xa1, 0xc3, 0x47, 0x0f, 0x1f, 0x6d, 0x46, 0xc6, - 0x2a, 0xd0, 0x62, 0x34, 0xd4, 0x36, 0x7b, 0x0f, 0x00, 0x00, 0xff, 0xff, 0x01, 0x2f, 0xb0, 0x5d, - 0xdc, 0x01, 0x00, 0x00, +var fileDescriptor_0b1940ba04139c1b = []byte{ + // 278 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x2a, 0xa8, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0x17, 0x64, 0x67, 0x82, 0x70, 0x7c, 0x51, 0x6a, 0x59, 0x7e, 0x72, 0x62, 0x49, + 0x66, 0x7e, 0x5e, 0x3c, 0x5c, 0x39, 0x88, 0x53, 0x90, 0x9f, 0x99, 0x57, 0x52, 0x1c, 0x9f, 0x54, + 0x19, 0x9f, 0x59, 0x5c, 0x5c, 0x9a, 0x5a, 0x14, 0x5f, 0x5c, 0x9a, 0x94, 0x95, 0x9a, 0x5c, 0x12, + 0x9f, 0x9d, 0x5a, 0x19, 0x9f, 0x99, 0xa2, 0x57, 0x50, 0x94, 0x5f, 0x92, 0x2f, 0xa4, 0x87, 0x6a, + 0xa3, 0x1e, 0x1e, 0x1b, 0xf5, 0x0a, 0xb2, 0x33, 0xa5, 0xc2, 0xa8, 0xed, 0x42, 0x88, 0x3b, 0x94, + 0xce, 0x32, 0x72, 0x19, 0x07, 0x64, 0x67, 0x06, 0xc1, 0x95, 0xba, 0x20, 0xa9, 0x0c, 0x00, 0x7b, + 0xc5, 0xa9, 0xd2, 0x13, 
0xec, 0x91, 0x60, 0x88, 0x3f, 0xbc, 0x53, 0x2b, 0x3d, 0x5d, 0x84, 0xf4, + 0xb8, 0x84, 0x32, 0x31, 0x44, 0x25, 0x18, 0x15, 0x18, 0x35, 0x38, 0x83, 0xb0, 0xc8, 0x08, 0xa5, + 0x71, 0xb1, 0x41, 0x02, 0x46, 0x82, 0x49, 0x81, 0x59, 0x83, 0xdb, 0xc8, 0x8f, 0xc4, 0x00, 0xd0, + 0xc3, 0xef, 0xc8, 0x20, 0xa8, 0xe9, 0x4e, 0x71, 0x27, 0x1e, 0xc9, 0x31, 0x5e, 0x78, 0x24, 0xc7, + 0xf8, 0xe0, 0x91, 0x1c, 0xe3, 0x84, 0xc7, 0x72, 0x0c, 0x17, 0x1e, 0xcb, 0x31, 0xdc, 0x78, 0x2c, + 0xc7, 0x10, 0xe5, 0x92, 0x9e, 0x59, 0x92, 0x51, 0x9a, 0xa4, 0x97, 0x9c, 0x9f, 0xab, 0x0f, 0xb1, + 0x5b, 0x17, 0x5b, 0x68, 0xea, 0x22, 0x6c, 0xd7, 0x85, 0x86, 0x67, 0x05, 0x38, 0x44, 0x4b, 0x2a, + 0x0b, 0x52, 0x8b, 0x93, 0xd8, 0xc0, 0xc1, 0x66, 0x0c, 0x08, 0x00, 0x00, 0xff, 0xff, 0x94, 0x1c, + 0xff, 0x07, 0x32, 0x02, 0x00, 0x00, } func (m *PkiRevocationDistributionPointsByIssuerSubjectKeyID) Marshal() (dAtA []byte, err error) { diff --git a/x/pki/types/proposed_certificate.pb.go b/x/pki/types/proposed_certificate.pb.go index ec6c271b6..38f6607b8 100644 --- a/x/pki/types/proposed_certificate.pb.go +++ b/x/pki/types/proposed_certificate.pb.go @@ -1,13 +1,13 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: pki/proposed_certificate.proto +// source: zigbeealliance/distributedcomplianceledger/pki/proposed_certificate.proto package types import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -42,7 +42,7 @@ func (m *ProposedCertificate) Reset() { *m = ProposedCertificate{} } func (m *ProposedCertificate) String() string { return proto.CompactTextString(m) } func (*ProposedCertificate) ProtoMessage() {} func (*ProposedCertificate) Descriptor() ([]byte, []int) { - return fileDescriptor_589d443e04a789ec, []int{0} + return fileDescriptor_d41c51041f3e0eb2, []int{0} } func (m *ProposedCertificate) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -152,39 +152,41 @@ func init() { proto.RegisterType((*ProposedCertificate)(nil), "zigbeealliance.distributedcomplianceledger.pki.ProposedCertificate") } -func init() { proto.RegisterFile("pki/proposed_certificate.proto", fileDescriptor_589d443e04a789ec) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/pki/proposed_certificate.proto", fileDescriptor_d41c51041f3e0eb2) +} -var fileDescriptor_589d443e04a789ec = []byte{ - // 459 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x92, 0xc1, 0x6e, 0x13, 0x31, - 0x10, 0x86, 0xb3, 0xa4, 0x69, 0x88, 0x4b, 0x85, 0x30, 0x3d, 0x98, 0x1c, 0x96, 0x28, 0xe2, 0x90, - 0x43, 0xb3, 0x0b, 0x54, 0xe1, 0x80, 0xd4, 0x43, 0x03, 0x12, 0x42, 0x48, 0x80, 0x36, 0x88, 0x03, - 0x07, 0x2a, 0xef, 0x7a, 0x70, 0x4d, 0x77, 0xd7, 0x96, 0xed, 0x84, 0x96, 0xa7, 0x40, 0xe2, 0x55, - 0x78, 0x08, 0x8e, 0x15, 0x27, 0x4e, 0x08, 0x25, 0x6f, 0xc0, 0x13, 0x20, 0xaf, 0x37, 0x4a, 0x57, - 0x9c, 0x50, 0x6f, 0x9e, 0x7f, 0xe6, 0x1f, 0xcf, 0x37, 0x1a, 0x14, 0xaa, 0x53, 0x11, 0x2b, 0x2d, - 0x95, 0x34, 0xc0, 0x8e, 0x33, 
0xd0, 0x56, 0x7c, 0x10, 0x19, 0xb5, 0x10, 0x29, 0x2d, 0xad, 0xc4, - 0xd1, 0x67, 0xc1, 0x53, 0x00, 0x9a, 0xe7, 0x82, 0x96, 0x19, 0x44, 0x4c, 0x18, 0xab, 0x45, 0x3a, - 0xb7, 0xc0, 0x32, 0x59, 0x28, 0xaf, 0xe6, 0xc0, 0x38, 0xe8, 0x48, 0x9d, 0x8a, 0xfe, 0x9d, 0x4c, - 0x9a, 0x42, 0x9a, 0xe3, 0xca, 0x1d, 0xfb, 0xc0, 0xb7, 0xea, 0xef, 0x71, 0xc9, 0xa5, 0xd7, 0xdd, - 0xab, 0x56, 0x6f, 0xba, 0x01, 0xb8, 0xa6, 0xa5, 0xf5, 0xc2, 0xf0, 0xeb, 0x16, 0xba, 0xfd, 0xba, - 0x1e, 0xe8, 0xc9, 0x66, 0x1e, 0x4c, 0x50, 0xd7, 0xcc, 0xd3, 0x8f, 0x90, 0x59, 0x12, 0x0c, 0x82, - 0x51, 0x2f, 0x59, 0x87, 0x78, 0x88, 0x6e, 0xd4, 0xcf, 0x17, 0x70, 0xfe, 0x9c, 0x91, 0x6b, 0x55, - 0xba, 0xa1, 0x39, 0xb7, 0x82, 0xc2, 0xf5, 0x23, 0x6d, 0xef, 0xae, 0xc3, 0xca, 0x0d, 0x5a, 0xd0, - 0xfc, 0xe5, 0xbc, 0x48, 0x41, 0x93, 0xad, 0xda, 0x7d, 0x49, 0xc3, 0x11, 0xea, 0xc8, 0x4f, 0x25, - 0x68, 0xd2, 0x71, 0xc9, 0x29, 0xf9, 0xf1, 0x6d, 0xbc, 0x57, 0xb3, 0x1d, 0x31, 0xa6, 0xc1, 0x98, - 0x99, 0xd5, 0xa2, 0xe4, 0x89, 0x2f, 0xc3, 0x33, 0xd4, 0xa3, 0x4a, 0x69, 0xb9, 0xa0, 0xb9, 0x21, - 0xdb, 0x83, 0xf6, 0x68, 0xe7, 0xe1, 0xe4, 0x3f, 0x37, 0x19, 0x3d, 0x73, 0x3b, 0x49, 0x36, 0x7d, - 0xf0, 0x3d, 0xb4, 0x5b, 0x23, 0x1d, 0x99, 0x37, 0x70, 0x66, 0x49, 0xb7, 0x9a, 0xb4, 0x29, 0xe2, - 0x57, 0xa8, 0xab, 0xc1, 0xc5, 0x86, 0x5c, 0xbf, 0xca, 0xc7, 0xeb, 0x2e, 0xf8, 0x3e, 0x6a, 0x2f, - 0x04, 0x23, 0xbd, 0x41, 0x30, 0xea, 0x4c, 0xc3, 0x3f, 0xbf, 0xee, 0xf6, 0x17, 0x34, 0x17, 0x8c, - 0x5a, 0x78, 0x3c, 0xe4, 0x16, 0x0e, 0x1f, 0xec, 0xe7, 0x16, 0x0e, 0x1f, 0x4d, 0x26, 0x07, 0x93, - 0x61, 0xe2, 0x4a, 0xf1, 0x3e, 0xba, 0xe5, 0x0e, 0x69, 0x96, 0x9d, 0x40, 0x41, 0xdf, 0x82, 0x36, - 0x42, 0x96, 0x04, 0x0d, 0x82, 0xd1, 0x6e, 0xf2, 0x6f, 0xa2, 0xc2, 0x6a, 0x54, 0xee, 0x54, 0x95, - 0x4d, 0x71, 0xfa, 0xfe, 0xfb, 0x32, 0x0c, 0x2e, 0x96, 0x61, 0xf0, 0x7b, 0x19, 0x06, 0x5f, 0x56, - 0x61, 0xeb, 0x62, 0x15, 0xb6, 0x7e, 0xae, 0xc2, 0xd6, 0xbb, 0xa7, 0x5c, 0xd8, 0x93, 0x79, 0x1a, - 0x65, 0xb2, 0x88, 0x3d, 0xe9, 0x78, 0x8d, 0x1a, 0x5f, 0x42, 0x1d, 
0x6f, 0x58, 0xc7, 0x1e, 0x36, - 0x3e, 0x8b, 0xdd, 0xed, 0xd9, 0x73, 0x05, 0x26, 0xdd, 0xae, 0x8e, 0xef, 0xe0, 0x6f, 0x00, 0x00, - 0x00, 0xff, 0xff, 0xd0, 0x09, 0x4d, 0x47, 0x10, 0x03, 0x00, 0x00, +var fileDescriptor_d41c51041f3e0eb2 = []byte{ + // 462 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x52, 0xc1, 0x6e, 0x13, 0x31, + 0x10, 0xcd, 0x92, 0xa6, 0x21, 0x2e, 0x3d, 0x60, 0x7a, 0x30, 0x39, 0x2c, 0x51, 0xc4, 0x21, 0x87, + 0x66, 0x17, 0xa8, 0xc2, 0xa1, 0x52, 0x0f, 0x0d, 0x48, 0xa8, 0x42, 0x02, 0xb4, 0x41, 0x1c, 0x38, + 0x50, 0x79, 0xd7, 0xc3, 0xd6, 0x74, 0x77, 0x6d, 0xd9, 0x4e, 0x68, 0xf9, 0x0a, 0x24, 0x7e, 0x85, + 0x8f, 0xe0, 0x58, 0x71, 0xe2, 0x84, 0x50, 0xf2, 0x07, 0x7c, 0x01, 0x5a, 0xdb, 0x55, 0x58, 0x81, + 0x90, 0x2a, 0x6e, 0x9e, 0x37, 0xf3, 0x9e, 0xe7, 0x8d, 0x1e, 0x3a, 0xfa, 0xc0, 0xf3, 0x14, 0x80, + 0x16, 0x05, 0xa7, 0x55, 0x06, 0x31, 0xe3, 0xda, 0x28, 0x9e, 0xce, 0x0d, 0xb0, 0x4c, 0x94, 0xd2, + 0xa1, 0x05, 0xb0, 0x1c, 0x54, 0x2c, 0x4f, 0x79, 0x2c, 0x95, 0x90, 0x42, 0x03, 0x3b, 0xce, 0x40, + 0x19, 0xfe, 0x96, 0x67, 0xd4, 0x40, 0x24, 0x95, 0x30, 0x02, 0x47, 0x4d, 0xa9, 0xe8, 0x1f, 0x52, + 0x91, 0x3c, 0xe5, 0xfd, 0xdb, 0x99, 0xd0, 0xa5, 0xd0, 0xc7, 0x96, 0x1d, 0xbb, 0xc2, 0x49, 0xf5, + 0x77, 0x72, 0x91, 0x0b, 0x87, 0xd7, 0x2f, 0x8f, 0xee, 0x5f, 0x71, 0xd7, 0x5c, 0xd1, 0xca, 0x38, + 0xee, 0xf0, 0xd3, 0x06, 0xba, 0xf5, 0xc2, 0xef, 0xfe, 0x68, 0xbd, 0x3a, 0x26, 0xa8, 0xab, 0xe7, + 0xe9, 0x3b, 0xc8, 0x0c, 0x09, 0x06, 0xc1, 0xa8, 0x97, 0x5c, 0x96, 0x78, 0x88, 0x6e, 0xf8, 0xe7, + 0x53, 0x38, 0x3f, 0x62, 0xe4, 0x9a, 0x6d, 0x37, 0xb0, 0x9a, 0x2d, 0xa1, 0xac, 0xf5, 0x48, 0xdb, + 0xb1, 0x7d, 0x69, 0xd9, 0xa0, 0x38, 0x2d, 0x9e, 0xcd, 0xcb, 0x14, 0x14, 0xd9, 0xf0, 0xec, 0xdf, + 0x30, 0x1c, 0xa1, 0x8e, 0x78, 0x5f, 0x81, 0x22, 0x9d, 0xba, 0x39, 0x25, 0x5f, 0x3f, 0x8f, 0x77, + 0xfc, 0x19, 0x0e, 0x19, 0x53, 0xa0, 0xf5, 0xcc, 0x28, 0x5e, 0xe5, 0x89, 0x1b, 0xc3, 0x33, 0xd4, + 0xa3, 0x52, 0x2a, 0xb1, 0xa0, 0x85, 
0x26, 0x9b, 0x83, 0xf6, 0x68, 0xeb, 0xc1, 0xe4, 0x8a, 0x47, + 0x8f, 0x9e, 0xd4, 0x37, 0x49, 0xd6, 0x3a, 0xf8, 0x2e, 0xda, 0xf6, 0x96, 0x0e, 0xf5, 0x4b, 0x38, + 0x33, 0xa4, 0x6b, 0x37, 0x6d, 0x82, 0xf8, 0x39, 0xea, 0x2a, 0xa8, 0x6b, 0x4d, 0xae, 0xff, 0xcf, + 0xc7, 0x97, 0x2a, 0xf8, 0x1e, 0x6a, 0x2f, 0x38, 0x23, 0xbd, 0x41, 0x30, 0xea, 0x4c, 0xc3, 0x9f, + 0xdf, 0xef, 0xf4, 0x17, 0xb4, 0xe0, 0x8c, 0x1a, 0xd8, 0x1f, 0xe6, 0x06, 0x0e, 0xee, 0xef, 0x16, + 0x06, 0x0e, 0x1e, 0x4e, 0x26, 0x7b, 0x93, 0x61, 0x52, 0x8f, 0xe2, 0x5d, 0x74, 0xb3, 0xce, 0xdc, + 0x2c, 0x3b, 0x81, 0x92, 0xbe, 0x02, 0xa5, 0xb9, 0xa8, 0x08, 0x1a, 0x04, 0xa3, 0xed, 0xe4, 0xcf, + 0x86, 0xb5, 0xd5, 0x98, 0xdc, 0xb2, 0x93, 0x4d, 0x70, 0xfa, 0xe6, 0xcb, 0x32, 0x0c, 0x2e, 0x96, + 0x61, 0xf0, 0x63, 0x19, 0x06, 0x1f, 0x57, 0x61, 0xeb, 0x62, 0x15, 0xb6, 0xbe, 0xad, 0xc2, 0xd6, + 0xeb, 0xc7, 0x39, 0x37, 0x27, 0xf3, 0x34, 0xca, 0x44, 0x19, 0x3b, 0xa7, 0xe3, 0xbf, 0xe5, 0x6e, + 0xbc, 0xf6, 0x3a, 0xf6, 0xc9, 0x3b, 0xb3, 0xd9, 0x33, 0xe7, 0x12, 0x74, 0xba, 0x69, 0xc3, 0xb7, + 0xf7, 0x2b, 0x00, 0x00, 0xff, 0xff, 0x71, 0x78, 0x94, 0xba, 0x66, 0x03, 0x00, 0x00, } func (m *ProposedCertificate) Marshal() (dAtA []byte, err error) { diff --git a/x/pki/types/proposed_certificate_revocation.pb.go b/x/pki/types/proposed_certificate_revocation.pb.go index 9637e91fc..1386c93e3 100644 --- a/x/pki/types/proposed_certificate_revocation.pb.go +++ b/x/pki/types/proposed_certificate_revocation.pb.go @@ -1,11 +1,11 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: pki/proposed_certificate_revocation.proto +// source: zigbeealliance/distributedcomplianceledger/pki/proposed_certificate_revocation.proto package types import ( fmt "fmt" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -36,7 +36,7 @@ func (m *ProposedCertificateRevocation) Reset() { *m = ProposedCertifica func (m *ProposedCertificateRevocation) String() string { return proto.CompactTextString(m) } func (*ProposedCertificateRevocation) ProtoMessage() {} func (*ProposedCertificateRevocation) Descriptor() ([]byte, []int) { - return fileDescriptor_24b0dc6e71a9ad57, []int{0} + return fileDescriptor_36ac7b8e2a2435b6, []int{0} } func (m *ProposedCertificateRevocation) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -119,33 +119,33 @@ func init() { } func init() { - proto.RegisterFile("pki/proposed_certificate_revocation.proto", fileDescriptor_24b0dc6e71a9ad57) + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/pki/proposed_certificate_revocation.proto", fileDescriptor_36ac7b8e2a2435b6) } -var fileDescriptor_24b0dc6e71a9ad57 = []byte{ - // 345 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x91, 0x4f, 0x4b, 0xc3, 0x30, - 0x18, 0xc6, 0xd7, 0x4d, 0x37, 0x97, 0x39, 0x84, 0x9e, 0x82, 0x60, 0x29, 0xc3, 0x43, 0x3d, 0xac, - 0x05, 0xc5, 0x0f, 0xa0, 0x13, 0x44, 0x04, 0x91, 0x2a, 0x1e, 0x3c, 0x38, 0xd2, 0xf4, 0xb5, 0x8b, - 0x6b, 0x9b, 0x90, 0xa4, 0x63, 0xf3, 0x53, 0xf8, 0xa1, 0x3c, 0x78, 0xdc, 0xd1, 0xa3, 0x6c, 0x5f, - 0x44, 0xfa, 0x67, 0x6e, 0x3b, 0x7a, 0x4b, 0x7e, 0x81, 0xf7, 0xf7, 0x3c, 0x79, 0xd1, 0x89, 0x18, - 0x33, 0x4f, 0x48, 0x2e, 0xb8, 0x82, 0x70, 0x48, 0x41, 0x6a, 0xf6, 0xca, 0x28, 0xd1, 0x30, 0x94, - 0x30, 0xe1, 0x94, 0x68, 0xc6, 0x53, 0x57, 0x48, 0xae, 0xb9, 0xe9, 0xbe, 0xb3, 0x28, 0x00, 0x20, - 0x71, 0xcc, 0x48, 0x4a, 0xc1, 0x0d, 0x99, 0xd2, 0x92, 0x05, 0x99, 0x86, 0x90, 0xf2, 0x44, 0x94, - 
0x34, 0x86, 0x30, 0x02, 0xe9, 0x8a, 0x31, 0x3b, 0x3c, 0xc8, 0x47, 0x47, 0x92, 0xa4, 0xba, 0x1c, - 0xd0, 0xfb, 0xac, 0xa3, 0xa3, 0xfb, 0x4a, 0x35, 0x58, 0x9b, 0xfc, 0x3f, 0x91, 0x89, 0x51, 0x4b, - 0x65, 0xc1, 0x1b, 0x50, 0x8d, 0x0d, 0xdb, 0x70, 0xda, 0xfe, 0xea, 0x6a, 0xf6, 0xd0, 0x7e, 0x75, - 0xbc, 0x85, 0xd9, 0x4d, 0x88, 0xeb, 0xc5, 0xf3, 0x16, 0x33, 0x1f, 0x50, 0x9b, 0x08, 0x21, 0xf9, - 0x84, 0xc4, 0x0a, 0x37, 0xec, 0x86, 0xd3, 0x39, 0x3d, 0xff, 0x67, 0x68, 0xf7, 0x3a, 0xcf, 0xeb, - 0xaf, 0xe7, 0x98, 0xc7, 0xa8, 0x5b, 0x49, 0x2e, 0xd4, 0x23, 0x4c, 0x35, 0xde, 0x29, 0xcc, 0xdb, - 0xb0, 0x88, 0x07, 0x92, 0x91, 0xf8, 0x2e, 0x4b, 0x02, 0x90, 0x78, 0xb7, 0x8a, 0xb7, 0xc1, 0x4c, - 0x1b, 0x75, 0xf2, 0x3f, 0x1d, 0xc3, 0x60, 0xc4, 0xe2, 0x10, 0x37, 0x6d, 0xc3, 0xd9, 0xf3, 0x37, - 0x51, 0xe1, 0xa2, 0x23, 0x48, 0xc8, 0x13, 0x48, 0xc5, 0x78, 0x8a, 0x5b, 0xb6, 0xe1, 0x74, 0xfd, - 0x6d, 0x78, 0xf9, 0xf2, 0xb5, 0xb0, 0x8c, 0xf9, 0xc2, 0x32, 0x7e, 0x16, 0x96, 0xf1, 0xb1, 0xb4, - 0x6a, 0xf3, 0xa5, 0x55, 0xfb, 0x5e, 0x5a, 0xb5, 0xe7, 0xab, 0x88, 0xe9, 0x51, 0x16, 0xb8, 0x94, - 0x27, 0x5e, 0xd9, 0xbb, 0xbf, 0x2a, 0xee, 0x6d, 0x14, 0xef, 0xaf, 0x9b, 0xf7, 0xcb, 0xea, 0xde, - 0xd4, 0xcb, 0x97, 0xa5, 0x67, 0x02, 0x54, 0xd0, 0x2c, 0xb6, 0x75, 0xf6, 0x1b, 0x00, 0x00, 0xff, - 0xff, 0x0a, 0x0a, 0x26, 0x5f, 0x1b, 0x02, 0x00, 0x00, +var fileDescriptor_36ac7b8e2a2435b6 = []byte{ + // 349 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x91, 0x41, 0x4b, 0xc3, 0x30, + 0x18, 0x86, 0xd7, 0x4d, 0x37, 0x97, 0xb9, 0x4b, 0x4f, 0x41, 0xb0, 0x94, 0xe1, 0xa1, 0x97, 0xb5, + 0xa0, 0x78, 0xf1, 0xa6, 0x13, 0x44, 0x04, 0x91, 0x3a, 0x3c, 0x78, 0x70, 0xa4, 0xe9, 0x67, 0x17, + 0xd7, 0x36, 0x21, 0x49, 0xc7, 0xe6, 0xaf, 0xf0, 0x47, 0x79, 0xf0, 0xb8, 0xa3, 0x47, 0xd9, 0xfe, + 0x88, 0xac, 0xab, 0x6e, 0x05, 0x11, 0x76, 0x4b, 0x9e, 0xc0, 0xf7, 0x3e, 0x5f, 0x5e, 0xd4, 0x7f, + 0x65, 0x51, 0x00, 0x40, 0xe2, 0x98, 0x91, 0x94, 0x82, 0x17, 0x32, 0xa5, 0x25, 0x0b, 
0x32, 0x0d, + 0x21, 0xe5, 0x89, 0x58, 0xd1, 0x18, 0xc2, 0x08, 0xa4, 0x27, 0x46, 0xcc, 0x13, 0x92, 0x0b, 0xae, + 0x20, 0x1c, 0x50, 0x90, 0x9a, 0x3d, 0x33, 0x4a, 0x34, 0x0c, 0x24, 0x8c, 0x39, 0x25, 0x9a, 0xf1, + 0xd4, 0x15, 0x92, 0x6b, 0x6e, 0xba, 0xe5, 0xa9, 0xee, 0x3f, 0x53, 0x5d, 0x31, 0x62, 0x07, 0x67, + 0x5b, 0x5a, 0x44, 0x92, 0xa4, 0x7a, 0x95, 0xd5, 0x79, 0xaf, 0xa2, 0xc3, 0xbb, 0xc2, 0xaa, 0xb7, + 0x96, 0xf2, 0x7f, 0x9d, 0x4c, 0x8c, 0x1a, 0x2a, 0x0b, 0x5e, 0x80, 0x6a, 0x6c, 0xd8, 0x86, 0xd3, + 0xf4, 0x7f, 0xae, 0x66, 0x07, 0xed, 0x17, 0xc7, 0x1b, 0x98, 0x5e, 0x87, 0xb8, 0x9a, 0x3f, 0x97, + 0x98, 0x79, 0x8f, 0x9a, 0x44, 0x08, 0xc9, 0xc7, 0x24, 0x56, 0xb8, 0x66, 0xd7, 0x9c, 0xd6, 0xf1, + 0xe9, 0x96, 0xfb, 0xb9, 0x57, 0x4b, 0x5f, 0x7f, 0x3d, 0xc7, 0x3c, 0x42, 0xed, 0x22, 0xe4, 0x5c, + 0xf5, 0x61, 0xa2, 0xf1, 0x4e, 0x9e, 0x5c, 0x86, 0xb9, 0x1e, 0x48, 0x46, 0xe2, 0xdb, 0x2c, 0x09, + 0x40, 0xe2, 0xdd, 0x42, 0x6f, 0x83, 0x99, 0x36, 0x6a, 0x2d, 0xbf, 0x7f, 0x04, 0xbd, 0x21, 0x8b, + 0x43, 0x5c, 0xb7, 0x0d, 0x67, 0xcf, 0xdf, 0x44, 0x79, 0x16, 0x1d, 0x42, 0x42, 0x1e, 0x40, 0x2a, + 0xc6, 0x53, 0xdc, 0xb0, 0x0d, 0xa7, 0xed, 0x97, 0xe1, 0xc5, 0xd3, 0xc7, 0xdc, 0x32, 0x66, 0x73, + 0xcb, 0xf8, 0x9a, 0x5b, 0xc6, 0xdb, 0xc2, 0xaa, 0xcc, 0x16, 0x56, 0xe5, 0x73, 0x61, 0x55, 0x1e, + 0x2f, 0x23, 0xa6, 0x87, 0x59, 0xe0, 0x52, 0x9e, 0x78, 0xab, 0xbd, 0xbb, 0x7f, 0x15, 0xd5, 0x5d, + 0x6f, 0xde, 0x2d, 0xaa, 0x9a, 0xe4, 0x65, 0xe9, 0xa9, 0x00, 0x15, 0xd4, 0xf3, 0xb6, 0x4e, 0xbe, + 0x03, 0x00, 0x00, 0xff, 0xff, 0xd7, 0xa6, 0xc4, 0xd2, 0x71, 0x02, 0x00, 0x00, } func (m *ProposedCertificateRevocation) Marshal() (dAtA []byte, err error) { diff --git a/x/pki/types/query.pb.go b/x/pki/types/query.pb.go index 9cd9942ac..eac07552f 100644 --- a/x/pki/types/query.pb.go +++ b/x/pki/types/query.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: pki/query.proto +// source: zigbeealliance/distributedcomplianceledger/pki/query.proto package types @@ -8,9 +8,9 @@ import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" query "github.com/cosmos/cosmos-sdk/types/query" - _ "github.com/gogo/protobuf/gogoproto" - grpc1 "github.com/gogo/protobuf/grpc" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + grpc1 "github.com/cosmos/gogoproto/grpc" + proto "github.com/cosmos/gogoproto/proto" _ "google.golang.org/genproto/googleapis/api/annotations" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" @@ -40,7 +40,7 @@ func (m *QueryGetApprovedCertificatesRequest) Reset() { *m = QueryGetApp func (m *QueryGetApprovedCertificatesRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetApprovedCertificatesRequest) ProtoMessage() {} func (*QueryGetApprovedCertificatesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{0} + return fileDescriptor_db6ffa8ae459ba1c, []int{0} } func (m *QueryGetApprovedCertificatesRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -91,7 +91,7 @@ func (m *QueryGetApprovedCertificatesResponse) Reset() { *m = QueryGetAp func (m *QueryGetApprovedCertificatesResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetApprovedCertificatesResponse) ProtoMessage() {} func (*QueryGetApprovedCertificatesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{1} + return fileDescriptor_db6ffa8ae459ba1c, []int{1} } func (m *QueryGetApprovedCertificatesResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -136,7 +136,7 @@ func (m *QueryAllApprovedCertificatesRequest) Reset() { *m = QueryAllApp func (m *QueryAllApprovedCertificatesRequest) String() string { return proto.CompactTextString(m) } func (*QueryAllApprovedCertificatesRequest) ProtoMessage() {} func (*QueryAllApprovedCertificatesRequest) Descriptor() ([]byte, 
[]int) { - return fileDescriptor_62972e0134af9ed2, []int{2} + return fileDescriptor_db6ffa8ae459ba1c, []int{2} } func (m *QueryAllApprovedCertificatesRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -188,7 +188,7 @@ func (m *QueryAllApprovedCertificatesResponse) Reset() { *m = QueryAllAp func (m *QueryAllApprovedCertificatesResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllApprovedCertificatesResponse) ProtoMessage() {} func (*QueryAllApprovedCertificatesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{3} + return fileDescriptor_db6ffa8ae459ba1c, []int{3} } func (m *QueryAllApprovedCertificatesResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -240,7 +240,7 @@ func (m *QueryGetProposedCertificateRequest) Reset() { *m = QueryGetProp func (m *QueryGetProposedCertificateRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetProposedCertificateRequest) ProtoMessage() {} func (*QueryGetProposedCertificateRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{4} + return fileDescriptor_db6ffa8ae459ba1c, []int{4} } func (m *QueryGetProposedCertificateRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -291,7 +291,7 @@ func (m *QueryGetProposedCertificateResponse) Reset() { *m = QueryGetPro func (m *QueryGetProposedCertificateResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetProposedCertificateResponse) ProtoMessage() {} func (*QueryGetProposedCertificateResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{5} + return fileDescriptor_db6ffa8ae459ba1c, []int{5} } func (m *QueryGetProposedCertificateResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -335,7 +335,7 @@ func (m *QueryAllProposedCertificateRequest) Reset() { *m = QueryAllProp func (m *QueryAllProposedCertificateRequest) String() string { return 
proto.CompactTextString(m) } func (*QueryAllProposedCertificateRequest) ProtoMessage() {} func (*QueryAllProposedCertificateRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{6} + return fileDescriptor_db6ffa8ae459ba1c, []int{6} } func (m *QueryAllProposedCertificateRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -380,7 +380,7 @@ func (m *QueryAllProposedCertificateResponse) Reset() { *m = QueryAllPro func (m *QueryAllProposedCertificateResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllProposedCertificateResponse) ProtoMessage() {} func (*QueryAllProposedCertificateResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{7} + return fileDescriptor_db6ffa8ae459ba1c, []int{7} } func (m *QueryAllProposedCertificateResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -432,7 +432,7 @@ func (m *QueryGetChildCertificatesRequest) Reset() { *m = QueryGetChildC func (m *QueryGetChildCertificatesRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetChildCertificatesRequest) ProtoMessage() {} func (*QueryGetChildCertificatesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{8} + return fileDescriptor_db6ffa8ae459ba1c, []int{8} } func (m *QueryGetChildCertificatesRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -483,7 +483,7 @@ func (m *QueryGetChildCertificatesResponse) Reset() { *m = QueryGetChild func (m *QueryGetChildCertificatesResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetChildCertificatesResponse) ProtoMessage() {} func (*QueryGetChildCertificatesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{9} + return fileDescriptor_db6ffa8ae459ba1c, []int{9} } func (m *QueryGetChildCertificatesResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -533,7 +533,7 @@ func (m 
*QueryGetProposedCertificateRevocationRequest) String() string { } func (*QueryGetProposedCertificateRevocationRequest) ProtoMessage() {} func (*QueryGetProposedCertificateRevocationRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{10} + return fileDescriptor_db6ffa8ae459ba1c, []int{10} } func (m *QueryGetProposedCertificateRevocationRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -595,7 +595,7 @@ func (m *QueryGetProposedCertificateRevocationResponse) String() string { } func (*QueryGetProposedCertificateRevocationResponse) ProtoMessage() {} func (*QueryGetProposedCertificateRevocationResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{11} + return fileDescriptor_db6ffa8ae459ba1c, []int{11} } func (m *QueryGetProposedCertificateRevocationResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -643,7 +643,7 @@ func (m *QueryAllProposedCertificateRevocationRequest) String() string { } func (*QueryAllProposedCertificateRevocationRequest) ProtoMessage() {} func (*QueryAllProposedCertificateRevocationRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{12} + return fileDescriptor_db6ffa8ae459ba1c, []int{12} } func (m *QueryAllProposedCertificateRevocationRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -692,7 +692,7 @@ func (m *QueryAllProposedCertificateRevocationResponse) String() string { } func (*QueryAllProposedCertificateRevocationResponse) ProtoMessage() {} func (*QueryAllProposedCertificateRevocationResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{13} + return fileDescriptor_db6ffa8ae459ba1c, []int{13} } func (m *QueryAllProposedCertificateRevocationResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -744,7 +744,7 @@ func (m *QueryGetRevokedCertificatesRequest) Reset() { *m = QueryGetRevo func (m *QueryGetRevokedCertificatesRequest) String() string { 
return proto.CompactTextString(m) } func (*QueryGetRevokedCertificatesRequest) ProtoMessage() {} func (*QueryGetRevokedCertificatesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{14} + return fileDescriptor_db6ffa8ae459ba1c, []int{14} } func (m *QueryGetRevokedCertificatesRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -795,7 +795,7 @@ func (m *QueryGetRevokedCertificatesResponse) Reset() { *m = QueryGetRev func (m *QueryGetRevokedCertificatesResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetRevokedCertificatesResponse) ProtoMessage() {} func (*QueryGetRevokedCertificatesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{15} + return fileDescriptor_db6ffa8ae459ba1c, []int{15} } func (m *QueryGetRevokedCertificatesResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -839,7 +839,7 @@ func (m *QueryAllRevokedCertificatesRequest) Reset() { *m = QueryAllRevo func (m *QueryAllRevokedCertificatesRequest) String() string { return proto.CompactTextString(m) } func (*QueryAllRevokedCertificatesRequest) ProtoMessage() {} func (*QueryAllRevokedCertificatesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{16} + return fileDescriptor_db6ffa8ae459ba1c, []int{16} } func (m *QueryAllRevokedCertificatesRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -884,7 +884,7 @@ func (m *QueryAllRevokedCertificatesResponse) Reset() { *m = QueryAllRev func (m *QueryAllRevokedCertificatesResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllRevokedCertificatesResponse) ProtoMessage() {} func (*QueryAllRevokedCertificatesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{17} + return fileDescriptor_db6ffa8ae459ba1c, []int{17} } func (m *QueryAllRevokedCertificatesResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -936,7 +936,7 @@ 
func (m *QueryGetApprovedRootCertificatesRequest) Reset() { func (m *QueryGetApprovedRootCertificatesRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetApprovedRootCertificatesRequest) ProtoMessage() {} func (*QueryGetApprovedRootCertificatesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{18} + return fileDescriptor_db6ffa8ae459ba1c, []int{18} } func (m *QueryGetApprovedRootCertificatesRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -975,7 +975,7 @@ func (m *QueryGetApprovedRootCertificatesResponse) Reset() { func (m *QueryGetApprovedRootCertificatesResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetApprovedRootCertificatesResponse) ProtoMessage() {} func (*QueryGetApprovedRootCertificatesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{19} + return fileDescriptor_db6ffa8ae459ba1c, []int{19} } func (m *QueryGetApprovedRootCertificatesResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1020,7 +1020,7 @@ func (m *QueryGetRevokedRootCertificatesRequest) Reset() { func (m *QueryGetRevokedRootCertificatesRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetRevokedRootCertificatesRequest) ProtoMessage() {} func (*QueryGetRevokedRootCertificatesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{20} + return fileDescriptor_db6ffa8ae459ba1c, []int{20} } func (m *QueryGetRevokedRootCertificatesRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1059,7 +1059,7 @@ func (m *QueryGetRevokedRootCertificatesResponse) Reset() { func (m *QueryGetRevokedRootCertificatesResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetRevokedRootCertificatesResponse) ProtoMessage() {} func (*QueryGetRevokedRootCertificatesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{21} + return 
fileDescriptor_db6ffa8ae459ba1c, []int{21} } func (m *QueryGetRevokedRootCertificatesResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1107,7 +1107,7 @@ func (m *QueryGetApprovedCertificatesBySubjectRequest) String() string { } func (*QueryGetApprovedCertificatesBySubjectRequest) ProtoMessage() {} func (*QueryGetApprovedCertificatesBySubjectRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{22} + return fileDescriptor_db6ffa8ae459ba1c, []int{22} } func (m *QueryGetApprovedCertificatesBySubjectRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1155,7 +1155,7 @@ func (m *QueryGetApprovedCertificatesBySubjectResponse) String() string { } func (*QueryGetApprovedCertificatesBySubjectResponse) ProtoMessage() {} func (*QueryGetApprovedCertificatesBySubjectResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{23} + return fileDescriptor_db6ffa8ae459ba1c, []int{23} } func (m *QueryGetApprovedCertificatesBySubjectResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1200,7 +1200,7 @@ func (m *QueryGetRejectedCertificatesRequest) Reset() { *m = QueryGetRej func (m *QueryGetRejectedCertificatesRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetRejectedCertificatesRequest) ProtoMessage() {} func (*QueryGetRejectedCertificatesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{24} + return fileDescriptor_db6ffa8ae459ba1c, []int{24} } func (m *QueryGetRejectedCertificatesRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1251,7 +1251,7 @@ func (m *QueryGetRejectedCertificatesResponse) Reset() { *m = QueryGetRe func (m *QueryGetRejectedCertificatesResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetRejectedCertificatesResponse) ProtoMessage() {} func (*QueryGetRejectedCertificatesResponse) Descriptor() ([]byte, []int) { - return 
fileDescriptor_62972e0134af9ed2, []int{25} + return fileDescriptor_db6ffa8ae459ba1c, []int{25} } func (m *QueryGetRejectedCertificatesResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1295,7 +1295,7 @@ func (m *QueryAllRejectedCertificatesRequest) Reset() { *m = QueryAllRej func (m *QueryAllRejectedCertificatesRequest) String() string { return proto.CompactTextString(m) } func (*QueryAllRejectedCertificatesRequest) ProtoMessage() {} func (*QueryAllRejectedCertificatesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{26} + return fileDescriptor_db6ffa8ae459ba1c, []int{26} } func (m *QueryAllRejectedCertificatesRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1340,7 +1340,7 @@ func (m *QueryAllRejectedCertificatesResponse) Reset() { *m = QueryAllRe func (m *QueryAllRejectedCertificatesResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllRejectedCertificatesResponse) ProtoMessage() {} func (*QueryAllRejectedCertificatesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{27} + return fileDescriptor_db6ffa8ae459ba1c, []int{27} } func (m *QueryAllRejectedCertificatesResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1397,7 +1397,7 @@ func (m *QueryGetPkiRevocationDistributionPointRequest) String() string { } func (*QueryGetPkiRevocationDistributionPointRequest) ProtoMessage() {} func (*QueryGetPkiRevocationDistributionPointRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{28} + return fileDescriptor_db6ffa8ae459ba1c, []int{28} } func (m *QueryGetPkiRevocationDistributionPointRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1459,7 +1459,7 @@ func (m *QueryGetPkiRevocationDistributionPointResponse) String() string { } func (*QueryGetPkiRevocationDistributionPointResponse) ProtoMessage() {} func (*QueryGetPkiRevocationDistributionPointResponse) Descriptor() ([]byte, 
[]int) { - return fileDescriptor_62972e0134af9ed2, []int{29} + return fileDescriptor_db6ffa8ae459ba1c, []int{29} } func (m *QueryGetPkiRevocationDistributionPointResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1507,7 +1507,7 @@ func (m *QueryAllPkiRevocationDistributionPointRequest) String() string { } func (*QueryAllPkiRevocationDistributionPointRequest) ProtoMessage() {} func (*QueryAllPkiRevocationDistributionPointRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{30} + return fileDescriptor_db6ffa8ae459ba1c, []int{30} } func (m *QueryAllPkiRevocationDistributionPointRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1556,7 +1556,7 @@ func (m *QueryAllPkiRevocationDistributionPointResponse) String() string { } func (*QueryAllPkiRevocationDistributionPointResponse) ProtoMessage() {} func (*QueryAllPkiRevocationDistributionPointResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{31} + return fileDescriptor_db6ffa8ae459ba1c, []int{31} } func (m *QueryAllPkiRevocationDistributionPointResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1611,7 +1611,7 @@ func (m *QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDRequest) Str } func (*QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDRequest) ProtoMessage() {} func (*QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{32} + return fileDescriptor_db6ffa8ae459ba1c, []int{32} } func (m *QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1659,7 +1659,7 @@ func (m *QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDResponse) St } func (*QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDResponse) ProtoMessage() {} func (*QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDResponse) 
Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{33} + return fileDescriptor_db6ffa8ae459ba1c, []int{33} } func (m *QueryGetPkiRevocationDistributionPointsByIssuerSubjectKeyIDResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1703,7 +1703,7 @@ func (m *QueryGetNocRootCertificatesRequest) Reset() { *m = QueryGetNocR func (m *QueryGetNocRootCertificatesRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetNocRootCertificatesRequest) ProtoMessage() {} func (*QueryGetNocRootCertificatesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{34} + return fileDescriptor_db6ffa8ae459ba1c, []int{34} } func (m *QueryGetNocRootCertificatesRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1747,7 +1747,7 @@ func (m *QueryGetNocRootCertificatesResponse) Reset() { *m = QueryGetNoc func (m *QueryGetNocRootCertificatesResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetNocRootCertificatesResponse) ProtoMessage() {} func (*QueryGetNocRootCertificatesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{35} + return fileDescriptor_db6ffa8ae459ba1c, []int{35} } func (m *QueryGetNocRootCertificatesResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1791,7 +1791,7 @@ func (m *QueryAllNocRootCertificatesRequest) Reset() { *m = QueryAllNocR func (m *QueryAllNocRootCertificatesRequest) String() string { return proto.CompactTextString(m) } func (*QueryAllNocRootCertificatesRequest) ProtoMessage() {} func (*QueryAllNocRootCertificatesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{36} + return fileDescriptor_db6ffa8ae459ba1c, []int{36} } func (m *QueryAllNocRootCertificatesRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1836,7 +1836,7 @@ func (m *QueryAllNocRootCertificatesResponse) Reset() { *m = QueryAllNoc func (m 
*QueryAllNocRootCertificatesResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllNocRootCertificatesResponse) ProtoMessage() {} func (*QueryAllNocRootCertificatesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{37} + return fileDescriptor_db6ffa8ae459ba1c, []int{37} } func (m *QueryAllNocRootCertificatesResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1887,7 +1887,7 @@ func (m *QueryGetNocIcaCertificatesRequest) Reset() { *m = QueryGetNocIc func (m *QueryGetNocIcaCertificatesRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetNocIcaCertificatesRequest) ProtoMessage() {} func (*QueryGetNocIcaCertificatesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{38} + return fileDescriptor_db6ffa8ae459ba1c, []int{38} } func (m *QueryGetNocIcaCertificatesRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1931,7 +1931,7 @@ func (m *QueryGetNocIcaCertificatesResponse) Reset() { *m = QueryGetNocI func (m *QueryGetNocIcaCertificatesResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetNocIcaCertificatesResponse) ProtoMessage() {} func (*QueryGetNocIcaCertificatesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{39} + return fileDescriptor_db6ffa8ae459ba1c, []int{39} } func (m *QueryGetNocIcaCertificatesResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1975,7 +1975,7 @@ func (m *QueryAllNocIcaCertificatesRequest) Reset() { *m = QueryAllNocIc func (m *QueryAllNocIcaCertificatesRequest) String() string { return proto.CompactTextString(m) } func (*QueryAllNocIcaCertificatesRequest) ProtoMessage() {} func (*QueryAllNocIcaCertificatesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{40} + return fileDescriptor_db6ffa8ae459ba1c, []int{40} } func (m *QueryAllNocIcaCertificatesRequest) XXX_Unmarshal(b []byte) 
error { return m.Unmarshal(b) @@ -2020,7 +2020,7 @@ func (m *QueryAllNocIcaCertificatesResponse) Reset() { *m = QueryAllNocI func (m *QueryAllNocIcaCertificatesResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllNocIcaCertificatesResponse) ProtoMessage() {} func (*QueryAllNocIcaCertificatesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{41} + return fileDescriptor_db6ffa8ae459ba1c, []int{41} } func (m *QueryAllNocIcaCertificatesResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2076,7 +2076,7 @@ func (m *QueryGetRevokedNocRootCertificatesRequest) String() string { } func (*QueryGetRevokedNocRootCertificatesRequest) ProtoMessage() {} func (*QueryGetRevokedNocRootCertificatesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{42} + return fileDescriptor_db6ffa8ae459ba1c, []int{42} } func (m *QueryGetRevokedNocRootCertificatesRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2131,7 +2131,7 @@ func (m *QueryGetRevokedNocRootCertificatesResponse) String() string { } func (*QueryGetRevokedNocRootCertificatesResponse) ProtoMessage() {} func (*QueryGetRevokedNocRootCertificatesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{43} + return fileDescriptor_db6ffa8ae459ba1c, []int{43} } func (m *QueryGetRevokedNocRootCertificatesResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2179,7 +2179,7 @@ func (m *QueryAllRevokedNocRootCertificatesRequest) String() string { } func (*QueryAllRevokedNocRootCertificatesRequest) ProtoMessage() {} func (*QueryAllRevokedNocRootCertificatesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{44} + return fileDescriptor_db6ffa8ae459ba1c, []int{44} } func (m *QueryAllRevokedNocRootCertificatesRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2228,7 +2228,7 @@ func (m 
*QueryAllRevokedNocRootCertificatesResponse) String() string { } func (*QueryAllRevokedNocRootCertificatesResponse) ProtoMessage() {} func (*QueryAllRevokedNocRootCertificatesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{45} + return fileDescriptor_db6ffa8ae459ba1c, []int{45} } func (m *QueryAllRevokedNocRootCertificatesResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2284,7 +2284,7 @@ func (m *QueryGetNocRootCertificatesByVidAndSkidRequest) String() string { } func (*QueryGetNocRootCertificatesByVidAndSkidRequest) ProtoMessage() {} func (*QueryGetNocRootCertificatesByVidAndSkidRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{46} + return fileDescriptor_db6ffa8ae459ba1c, []int{46} } func (m *QueryGetNocRootCertificatesByVidAndSkidRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2339,7 +2339,7 @@ func (m *QueryGetNocRootCertificatesByVidAndSkidResponse) String() string { } func (*QueryGetNocRootCertificatesByVidAndSkidResponse) ProtoMessage() {} func (*QueryGetNocRootCertificatesByVidAndSkidResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_62972e0134af9ed2, []int{47} + return fileDescriptor_db6ffa8ae459ba1c, []int{47} } func (m *QueryGetNocRootCertificatesByVidAndSkidResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2426,143 +2426,146 @@ func init() { proto.RegisterType((*QueryGetNocRootCertificatesByVidAndSkidResponse)(nil), "zigbeealliance.distributedcomplianceledger.pki.QueryGetNocRootCertificatesByVidAndSkidResponse") } -func init() { proto.RegisterFile("pki/query.proto", fileDescriptor_62972e0134af9ed2) } - -var fileDescriptor_62972e0134af9ed2 = []byte{ - // 2123 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x5b, 0x5f, 0x8c, 0xdc, 0x46, - 0x19, 0xcf, 0x78, 0x49, 0x11, 0x03, 0x02, 0x3a, 0x49, 0x93, 0x63, 0x95, 0xdb, 0x4b, 0x26, 0xe9, - 0xe5, 0x5f, 
0x6f, 0xdd, 0x34, 0x4a, 0x02, 0x42, 0x25, 0xba, 0xcb, 0x95, 0x6b, 0x8a, 0x38, 0xee, - 0xf6, 0x50, 0x4b, 0xab, 0x92, 0x95, 0xd7, 0xeb, 0x6e, 0xcc, 0xfa, 0xd6, 0xae, 0xed, 0x3d, 0xba, - 0x9c, 0x0e, 0x09, 0x24, 0x5a, 0xc4, 0x53, 0xf8, 0xf3, 0x00, 0xa8, 0xa2, 0x12, 0x95, 0x2a, 0xde, - 0x79, 0xe0, 0x91, 0xd7, 0x4a, 0x3c, 0x50, 0xa9, 0x0f, 0x20, 0x21, 0x21, 0x94, 0x03, 0x51, 0xd4, - 0x4a, 0x88, 0x17, 0x10, 0x2f, 0x50, 0x79, 0xfc, 0xf9, 0xec, 0x5d, 0xcf, 0xcc, 0x7a, 0xbd, 0xb3, - 0x97, 0xb7, 0xb5, 0x67, 0xfc, 0xfd, 0xf9, 0x7d, 0xbf, 0xf9, 0xe6, 0x9b, 0xf9, 0xee, 0xf0, 0xa7, - 0xbc, 0xae, 0xad, 0xbf, 0xdc, 0xb7, 0xfc, 0x41, 0xdd, 0xf3, 0xdd, 0xd0, 0x25, 0xf5, 0x6f, 0xd9, - 0x9d, 0x96, 0x65, 0x19, 0x8e, 0x63, 0x1b, 0x3d, 0xd3, 0xaa, 0xb7, 0xed, 0x20, 0xf4, 0xed, 0x56, - 0x3f, 0xb4, 0xda, 0xa6, 0xbb, 0xed, 0xc5, 0x6f, 0x1d, 0xab, 0xdd, 0xb1, 0xfc, 0xba, 0xd7, 0xb5, - 0xab, 0xa7, 0x3a, 0xae, 0xdb, 0x71, 0x2c, 0xdd, 0xf0, 0x6c, 0xdd, 0xe8, 0xf5, 0xdc, 0xd0, 0x08, - 0x6d, 0xb7, 0x17, 0xc4, 0xd2, 0xaa, 0x97, 0x4c, 0x37, 0xd8, 0x76, 0x03, 0xbd, 0x65, 0x04, 0x56, - 0xac, 0x46, 0xdf, 0xb9, 0xd2, 0xb2, 0x42, 0xe3, 0x8a, 0xee, 0x19, 0x1d, 0xbb, 0xc7, 0x26, 0xc3, - 0xdc, 0x85, 0xc8, 0x14, 0xc3, 0xf3, 0x7c, 0x77, 0xc7, 0x6a, 0x37, 0x4d, 0xcb, 0x0f, 0xed, 0x97, - 0x6c, 0xd3, 0x08, 0xad, 0x44, 0x58, 0x2d, 0x9a, 0xe0, 0xf9, 0xae, 0xe7, 0x06, 0xc3, 0x13, 0x60, - 0xfc, 0x54, 0x34, 0x6e, 0xde, 0xb5, 0x1d, 0xee, 0xd7, 0x17, 0x45, 0x5f, 0x37, 0x7d, 0x6b, 0xc7, - 0x35, 0xb3, 0x96, 0x30, 0x45, 0xd1, 0xdb, 0x2e, 0xdf, 0x90, 0x73, 0x43, 0x96, 0xfa, 0xae, 0x1b, - 0xf2, 0x66, 0x9d, 0xcd, 0x4a, 0x11, 0x4d, 0xba, 0x24, 0x74, 0xba, 0xd9, 0x1a, 0x34, 0x83, 0x7e, - 0xeb, 0x1b, 0x96, 0x19, 0x0e, 0x9b, 0x15, 0xbd, 0xe1, 0xfa, 0x7f, 0x99, 0x79, 0xd8, 0xb5, 0x33, - 0x0e, 0x35, 0x0f, 0xc2, 0x17, 0x3d, 0x78, 0xae, 0xdd, 0x4b, 0x84, 0x3d, 0x55, 0x68, 0x32, 0xb3, - 0xc2, 0x0e, 0x82, 0xbe, 0xe5, 0x27, 0xc6, 0x34, 0xbb, 0xd6, 0xa0, 0x69, 0xb7, 0xb3, 0x41, 0xeb, - 0xb9, 0xa6, 0xd0, 0xc1, 0x5a, 0x32, 0xc1, 0x36, 
0x0d, 0xde, 0xf8, 0xf9, 0x2c, 0x4a, 0x32, 0x41, - 0xba, 0x50, 0x53, 0x64, 0xe3, 0x8e, 0xdd, 0x6e, 0x1a, 0xbd, 0x76, 0x33, 0xe8, 0x1e, 0x98, 0x76, - 0xbc, 0xe3, 0x76, 0x5c, 0xf6, 0x53, 0x8f, 0x7e, 0xc1, 0xdb, 0xcf, 0xc4, 0x8c, 0x6c, 0xc6, 0x03, - 0xf1, 0x43, 0x3c, 0x44, 0x4d, 0x7c, 0x76, 0x33, 0xa2, 0xe8, 0x9a, 0x15, 0x2e, 0x43, 0x44, 0x6e, - 0x65, 0xd4, 0x34, 0xac, 0x97, 0xfb, 0x56, 0x10, 0x92, 0x39, 0xfc, 0x51, 0x80, 0x62, 0x0e, 0x9d, - 0x46, 0x17, 0x3e, 0xd6, 0x48, 0x1e, 0x09, 0xc5, 0x9f, 0x80, 0x9f, 0x5f, 0xb2, 0x06, 0xb7, 0xdb, - 0x73, 0x1a, 0x1b, 0x1e, 0x7a, 0x47, 0xdf, 0x42, 0xf8, 0x9c, 0x5c, 0x4b, 0xe0, 0xb9, 0xbd, 0xc0, - 0x22, 0xdf, 0xc6, 0xc7, 0x0d, 0xce, 0x38, 0xd3, 0xf9, 0xf1, 0x27, 0x56, 0x27, 0x5c, 0xa7, 0x75, - 0x9e, 0xae, 0x95, 0x8f, 0xbc, 0xfd, 0xe7, 0x85, 0x23, 0x0d, 0xae, 0x1e, 0xfa, 0x43, 0x04, 0x70, - 0x2c, 0x3b, 0x8e, 0x0c, 0x8e, 0x2f, 0x62, 0x9c, 0x2e, 0x65, 0xb0, 0x6e, 0xb1, 0x0e, 0xc0, 0x46, - 0xeb, 0xbe, 0x1e, 0xa7, 0x17, 0x58, 0xf7, 0xf5, 0x0d, 0xa3, 0x63, 0xc1, 0xb7, 0x8d, 0xcc, 0x97, - 0x85, 0xc0, 0xfb, 0x67, 0x02, 0x9e, 0xd0, 0xa6, 0xb1, 0xe0, 0x55, 0x0e, 0x03, 0x3c, 0xb2, 0x36, - 0x04, 0x8a, 0xc6, 0x40, 0x39, 0x3f, 0x16, 0x94, 0xd8, 0xf8, 0x2c, 0x2a, 0xb4, 0x85, 0x69, 0xc2, - 0x96, 0x0d, 0xc8, 0x5d, 0x19, 0x45, 0x6a, 0x28, 0xf9, 0x4b, 0x94, 0x12, 0x9f, 0xab, 0x04, 0x40, - 0xdd, 0xc5, 0xc7, 0xbc, 0xfc, 0x30, 0x84, 0xfc, 0xd6, 0xa4, 0x98, 0x72, 0x34, 0x01, 0xa4, 0x3c, - 0x2d, 0xd4, 0x01, 0x20, 0x96, 0x1d, 0x47, 0x02, 0x84, 0x22, 0x32, 0xd2, 0xf7, 0x33, 0xe4, 0x2f, - 0x05, 0x49, 0x65, 0xf6, 0x90, 0xa8, 0x24, 0xd9, 0xe9, 0x24, 0xfe, 0xb7, 0xa2, 0xed, 0x93, 0xb7, - 0xcc, 0x4f, 0xe0, 0x87, 0xe2, 0x7d, 0x00, 0x18, 0x06, 0x4f, 0x64, 0x11, 0x7f, 0xd2, 0xe8, 0x87, - 0x77, 0x5d, 0xdf, 0x0e, 0x07, 0x59, 0x8a, 0x8d, 0xbc, 0xa5, 0x3f, 0x47, 0xf8, 0x8c, 0x44, 0x09, - 0xe0, 0xd9, 0xc7, 0x0f, 0x9b, 0xa3, 0x83, 0x10, 0xc6, 0xe5, 0x49, 0xd1, 0xcc, 0x69, 0x01, 0x2c, - 0xf3, 0x1a, 0xe8, 0x3d, 0x84, 0x1f, 0x93, 0xae, 0x80, 0x64, 0x8b, 0x54, 0xb2, 0xe0, 
0xd8, 0x1c, - 0xcb, 0xb7, 0x0d, 0x67, 0xbd, 0xbf, 0xdd, 0xb2, 0xfc, 0xb9, 0x0a, 0xcc, 0xc9, 0xbc, 0xa3, 0xbf, - 0x43, 0x78, 0xa9, 0xa0, 0x49, 0x80, 0xdd, 0xcf, 0x10, 0x9e, 0xf7, 0x64, 0x33, 0x01, 0xc8, 0x2f, - 0x2b, 0xa0, 0x65, 0x2a, 0x14, 0x40, 0x95, 0x6b, 0xa6, 0x3b, 0x80, 0xaf, 0x68, 0x39, 0x8d, 0xe2, - 0xab, 0x6a, 0x1d, 0xff, 0x44, 0x03, 0x14, 0xc7, 0x2b, 0x9e, 0x00, 0xc5, 0xca, 0x83, 0x41, 0x71, - 0x26, 0xbb, 0x4a, 0x23, 0x2e, 0xbd, 0xd4, 0x17, 0x3a, 0xd9, 0x5d, 0x85, 0xab, 0x24, 0x4d, 0xa1, - 0x7e, 0x7e, 0xb8, 0xec, 0xae, 0xc2, 0xd1, 0x94, 0xa4, 0x50, 0x8e, 0x96, 0xec, 0xae, 0x22, 0x01, - 0x62, 0x16, 0xbb, 0x4a, 0x29, 0x48, 0x2a, 0xb3, 0x87, 0x44, 0x1d, 0xc9, 0x2e, 0xe2, 0xf3, 0xa3, - 0x85, 0x6e, 0xc3, 0x75, 0x43, 0x0e, 0xc0, 0xf4, 0x37, 0x08, 0x5f, 0x18, 0x3f, 0x17, 0xd0, 0xf9, - 0x01, 0xc2, 0x73, 0x86, 0x60, 0x12, 0x04, 0xe7, 0xe9, 0xb2, 0x05, 0xde, 0xa8, 0x3c, 0x00, 0x4a, - 0xa8, 0x8f, 0x5e, 0xc0, 0x8b, 0x23, 0x24, 0x17, 0xf9, 0xf8, 0x6b, 0x94, 0xe2, 0x21, 0x9c, 0x0a, - 0x2e, 0xbe, 0x86, 0xf0, 0x49, 0x9f, 0x3f, 0x07, 0x3c, 0x5c, 0x2b, 0xc9, 0x02, 0x81, 0x83, 0x22, - 0x6d, 0xf4, 0xe9, 0x74, 0x63, 0xe4, 0x16, 0xc1, 0x83, 0xad, 0x78, 0xcd, 0x8f, 0xcd, 0x19, 0x43, - 0x1b, 0xda, 0x18, 0x51, 0x99, 0x54, 0x6c, 0xc8, 0x66, 0x96, 0xdd, 0xd0, 0xa4, 0xea, 0x93, 0x54, - 0x2c, 0xd5, 0x9c, 0x3d, 0x2b, 0x36, 0xe0, 0x44, 0xae, 0x3e, 0x85, 0xbe, 0x99, 0x39, 0x2b, 0xf2, - 0xb5, 0x64, 0x13, 0x46, 0x6e, 0xbc, 0x7c, 0x0e, 0xcd, 0x89, 0x4a, 0x13, 0x46, 0x6e, 0x88, 0x6e, - 0x67, 0x93, 0x9a, 0x18, 0x0a, 0x55, 0x49, 0xf4, 0x83, 0xcc, 0x19, 0xb0, 0x1c, 0x28, 0x95, 0xd9, - 0x83, 0xa2, 0x2e, 0x8b, 0xbe, 0x96, 0xad, 0x03, 0xbb, 0x76, 0x5a, 0x0d, 0xac, 0x66, 0x2e, 0x6b, - 0x36, 0x5c, 0xbb, 0x77, 0xb0, 0x04, 0x3f, 0x8d, 0x2b, 0x3b, 0x76, 0x9b, 0x21, 0x7c, 0xb4, 0x11, - 0xfd, 0x24, 0xc7, 0xf1, 0x51, 0xc7, 0x68, 0x59, 0x0e, 0x90, 0x2c, 0x7e, 0x20, 0x75, 0x4c, 0xe2, - 0x1a, 0x7e, 0x2b, 0xe5, 0xdc, 0x2a, 0xd4, 0xa2, 0x9c, 0x11, 0xfa, 0x7b, 0x84, 0xeb, 0x45, 0x2d, - 0x81, 0x10, 0xbc, 0x8e, 
0x70, 0x4d, 0x3e, 0x15, 0x88, 0xb0, 0x3e, 0x71, 0x35, 0x25, 0x95, 0x0a, - 0x91, 0x19, 0xa3, 0x9b, 0x7e, 0x33, 0x53, 0x1c, 0x16, 0x82, 0x56, 0x15, 0x87, 0x7f, 0xaa, 0x01, - 0x94, 0x05, 0x34, 0x4f, 0x02, 0x65, 0xe5, 0x41, 0x41, 0xa9, 0x8e, 0xef, 0x21, 0x5e, 0x29, 0x46, - 0xb2, 0x60, 0x65, 0x70, 0x3b, 0x47, 0xd2, 0x24, 0x50, 0x7c, 0x6e, 0x23, 0x21, 0xb7, 0xdf, 0xd4, - 0xf0, 0xad, 0xa9, 0xd4, 0x42, 0x94, 0xfe, 0x84, 0xf0, 0x55, 0x6f, 0xf2, 0xef, 0x81, 0x4a, 0xa6, - 0xda, 0xd0, 0x71, 0x55, 0x41, 0x3c, 0xcb, 0x58, 0x49, 0xaf, 0xa7, 0xc7, 0x86, 0x75, 0xd7, 0x14, - 0x14, 0x3a, 0xf9, 0xfc, 0x33, 0x74, 0x14, 0xe0, 0x7e, 0x98, 0x66, 0xec, 0x5e, 0x7e, 0xb8, 0xec, - 0x36, 0xc6, 0xd1, 0x94, 0x64, 0x6c, 0x8e, 0x96, 0xec, 0x51, 0x40, 0xe2, 0xdc, 0x2c, 0x8e, 0x02, - 0xa5, 0x20, 0xa9, 0xcc, 0x1e, 0x12, 0x75, 0x8b, 0xfa, 0x5a, 0x7a, 0xf7, 0xb3, 0xee, 0x9a, 0xb7, - 0x4d, 0xa3, 0x18, 0x6f, 0x7e, 0x81, 0x86, 0x08, 0x97, 0xfb, 0x0e, 0x30, 0x7a, 0x05, 0x93, 0x5e, - 0x6e, 0x14, 0x62, 0xb3, 0x52, 0x02, 0xa2, 0x11, 0x49, 0x80, 0x10, 0x47, 0x07, 0xed, 0x82, 0x5f, - 0x71, 0x10, 0x05, 0x7e, 0xa9, 0xa2, 0xcc, 0xdf, 0xd1, 0x10, 0x43, 0x27, 0x45, 0xa3, 0x32, 0x6b, - 0x34, 0xd4, 0xd1, 0xc5, 0xc6, 0x17, 0x47, 0x4e, 0x4a, 0x92, 0x15, 0x39, 0x5d, 0x89, 0xfd, 0x5b, - 0x84, 0x2f, 0x15, 0xd1, 0x05, 0xe0, 0xde, 0x43, 0xb8, 0xea, 0x0b, 0xa7, 0x41, 0x70, 0x9f, 0x29, - 0x79, 0x36, 0x13, 0xaf, 0x4e, 0x89, 0x4e, 0x1a, 0x00, 0x58, 0xe9, 0x9d, 0xc2, 0x21, 0xa4, 0xaf, - 0xef, 0x6b, 0x00, 0xdb, 0x18, 0xad, 0x45, 0x61, 0xab, 0x1c, 0x36, 0x6c, 0xea, 0xc8, 0xfa, 0x52, - 0x5a, 0x15, 0xf3, 0x2c, 0x19, 0x3c, 0x6b, 0xb7, 0x97, 0x7b, 0xed, 0xad, 0xae, 0xdd, 0x16, 0x17, - 0xe8, 0x45, 0x98, 0xfa, 0x2e, 0xc2, 0x7a, 0x61, 0x45, 0x80, 0xfb, 0x1b, 0x08, 0x2f, 0xf4, 0xe4, - 0x73, 0x81, 0x04, 0x5f, 0x51, 0xb1, 0x95, 0x64, 0xc4, 0x42, 0x04, 0xc6, 0x69, 0x7f, 0xe2, 0xbd, - 0xc7, 0xf1, 0x51, 0xe6, 0x15, 0xf9, 0x2f, 0xc2, 0xc7, 0x79, 0x07, 0x73, 0xb2, 0x35, 0xa9, 0x69, - 0x05, 0x9a, 0xb8, 0xd5, 0xaf, 0xaa, 0x15, 0x1a, 0xe3, 0x4d, 
0xaf, 0x7f, 0xf7, 0xdd, 0xbf, 0xfe, - 0x58, 0x7b, 0x9c, 0xd4, 0xf5, 0xb6, 0xe9, 0xb0, 0x86, 0x75, 0xb6, 0x4f, 0xad, 0xef, 0x42, 0x40, - 0xf7, 0x0e, 0x7e, 0xb1, 0xd0, 0xee, 0x91, 0x7d, 0x84, 0x4f, 0xf2, 0x04, 0x2f, 0x3b, 0x4e, 0x49, - 0xf7, 0xe5, 0x4d, 0xdb, 0x92, 0xee, 0x8f, 0xe9, 0xba, 0xd2, 0x79, 0xe6, 0xfe, 0x49, 0xf2, 0x08, - 0xd7, 0x7d, 0xf2, 0x7f, 0x84, 0x8f, 0x71, 0x6e, 0xc1, 0x49, 0xa3, 0x6c, 0x2c, 0xc4, 0x8d, 0xc0, - 0xea, 0x96, 0x52, 0x99, 0xe0, 0xdf, 0x4d, 0xe6, 0xdf, 0xe7, 0xc8, 0x8d, 0x03, 0xff, 0x92, 0xfb, - 0xfa, 0xa5, 0x62, 0x71, 0xfe, 0x07, 0xc2, 0x27, 0x38, 0x0a, 0xa2, 0x30, 0x37, 0xca, 0x46, 0x44, - 0x39, 0x08, 0xf2, 0x96, 0x27, 0x5d, 0x64, 0x20, 0x9c, 0x26, 0x35, 0x39, 0x08, 0xe4, 0x3f, 0x08, - 0x3f, 0x9c, 0x6b, 0xc1, 0x91, 0x8d, 0xb2, 0x71, 0x11, 0x35, 0x26, 0xab, 0x9b, 0x0a, 0x25, 0x82, - 0x8b, 0x4f, 0x32, 0x17, 0x6f, 0x90, 0x6b, 0x29, 0x8f, 0xa3, 0xb9, 0x23, 0x41, 0x8e, 0x8f, 0x91, - 0x7b, 0xfa, 0xee, 0x70, 0xa7, 0x73, 0x8f, 0xfc, 0x4a, 0xc3, 0xf3, 0xd2, 0x6e, 0x0f, 0x79, 0x51, - 0x29, 0x3b, 0x47, 0x9a, 0x67, 0xd5, 0xaf, 0xcf, 0x48, 0x3a, 0xa0, 0xf3, 0x0c, 0x43, 0x67, 0x95, - 0xac, 0xe4, 0x09, 0x90, 0xfe, 0x2d, 0x51, 0xc1, 0x05, 0xf1, 0x23, 0x0d, 0x9f, 0x96, 0x6a, 0x8d, - 0x96, 0xc6, 0x8b, 0x4a, 0x69, 0xac, 0x06, 0xad, 0xa2, 0xfd, 0x44, 0x7a, 0x85, 0xa1, 0x75, 0x99, - 0x5c, 0x2c, 0x8c, 0x16, 0xf9, 0x1f, 0xc2, 0xc7, 0x38, 0x4d, 0x9b, 0xf2, 0x79, 0x52, 0xdc, 0xda, - 0x2a, 0x9f, 0x27, 0x25, 0xfd, 0x2b, 0xfa, 0x05, 0xe6, 0xf3, 0x67, 0xc9, 0xf5, 0x03, 0x9f, 0xa1, - 0x10, 0x2b, 0xc8, 0x8a, 0xf7, 0x10, 0x3e, 0xc1, 0x91, 0x3f, 0x55, 0x9a, 0x54, 0x8e, 0x81, 0xbc, - 0x87, 0x47, 0x1f, 0x65, 0x18, 0x2c, 0x90, 0x79, 0x29, 0x06, 0xe4, 0x5f, 0x08, 0xcf, 0x89, 0x9a, - 0x4f, 0xe4, 0xb9, 0x69, 0x8b, 0x14, 0xc1, 0x31, 0xa0, 0xfa, 0x35, 0xf5, 0x82, 0xc1, 0x6d, 0xca, - 0xdc, 0x3e, 0x45, 0xaa, 0xa9, 0xdb, 0xae, 0x1b, 0x0e, 0xfb, 0xfc, 0x6f, 0x84, 0x4f, 0x0a, 0xda, - 0x51, 0xe4, 0xd9, 0x29, 0xf9, 0x28, 0xf2, 0xf8, 0x39, 0xe5, 0x72, 0xc1, 0xe1, 0x4b, 0xcc, 0xe1, - 
0x73, 0x84, 0xe6, 0xe2, 0x9c, 0x77, 0xfc, 0x7b, 0x1a, 0x9e, 0x97, 0xf6, 0x9e, 0xca, 0x6f, 0x0c, - 0x45, 0x9a, 0x73, 0xe5, 0x37, 0x86, 0x42, 0xfd, 0x3a, 0x7a, 0x9e, 0x41, 0x71, 0x86, 0x2c, 0x8c, - 0xa9, 0x7e, 0xc9, 0x77, 0xb4, 0x28, 0xc1, 0xe5, 0x9b, 0x26, 0x53, 0x24, 0x23, 0x61, 0xdf, 0xa9, - 0x7c, 0xa5, 0x2f, 0x6b, 0x2e, 0x71, 0x4a, 0xc1, 0xa4, 0x0b, 0x54, 0x30, 0xc7, 0xbd, 0xcf, 0x72, - 0x5c, 0x4e, 0xc3, 0x54, 0x15, 0xbf, 0x7a, 0x18, 0xc6, 0xf4, 0xd8, 0x38, 0xc5, 0x20, 0x17, 0x06, - 0xf2, 0x96, 0x36, 0xae, 0x7b, 0x41, 0xca, 0x57, 0x2d, 0x45, 0x5a, 0x37, 0xd5, 0x3b, 0xb3, 0x12, - 0x0f, 0x48, 0x3c, 0xc5, 0x90, 0xb8, 0x49, 0x9e, 0x1c, 0xca, 0x03, 0xb0, 0xbd, 0xc7, 0x7f, 0x4b, - 0x9d, 0x94, 0x8c, 0xd9, 0x3b, 0xf5, 0x3d, 0x7d, 0x77, 0xc7, 0x6e, 0xef, 0xe9, 0xbb, 0xac, 0x27, - 0xb7, 0x47, 0x5e, 0xd5, 0xf0, 0x19, 0xb9, 0xc6, 0x88, 0x21, 0xe5, 0x6b, 0x96, 0x19, 0x62, 0x55, - 0xb8, 0x97, 0xc5, 0xdb, 0x24, 0x46, 0xb1, 0x22, 0x7f, 0xd0, 0xf0, 0xd5, 0x12, 0xed, 0x0d, 0xe2, - 0xcf, 0x26, 0xce, 0xb2, 0x6e, 0x53, 0x35, 0x38, 0x54, 0x9d, 0x00, 0xe2, 0x0d, 0x06, 0xe2, 0x15, - 0xa2, 0x4f, 0x48, 0x38, 0xf2, 0x01, 0xc2, 0xc7, 0x78, 0x37, 0x62, 0xa5, 0xcb, 0x4b, 0xf1, 0x7d, - 0x63, 0xf9, 0xf2, 0x52, 0x72, 0x9b, 0x48, 0x1f, 0x63, 0x9e, 0x2f, 0x92, 0x73, 0x07, 0x9e, 0xf7, - 0x5c, 0x33, 0xbf, 0xdd, 0xc6, 0x2b, 0x8b, 0xbc, 0xa1, 0xe1, 0x85, 0x31, 0x97, 0x55, 0xe4, 0x8e, - 0x42, 0x33, 0x39, 0x37, 0x7e, 0xd5, 0xe6, 0xcc, 0xe4, 0x03, 0x24, 0x9f, 0x67, 0x90, 0x5c, 0x23, - 0x57, 0xb3, 0x90, 0xe4, 0xff, 0x53, 0x22, 0x49, 0x36, 0xf9, 0x5b, 0x09, 0x8e, 0xa2, 0xa9, 0xca, - 0x6d, 0xe5, 0x9c, 0x90, 0xf7, 0xc9, 0x38, 0x1b, 0x11, 0x97, 0x13, 0xd1, 0xd1, 0x82, 0xe4, 0x9b, - 0x1a, 0x64, 0x73, 0x8a, 0x00, 0xf1, 0xdb, 0x3e, 0xd5, 0x86, 0x4a, 0x91, 0xe0, 0xe5, 0x65, 0xe6, - 0xe5, 0xa3, 0xe4, 0xec, 0x90, 0x97, 0xb6, 0x69, 0xf0, 0x88, 0xff, 0x37, 0x84, 0x1f, 0xc9, 0xcb, - 0x8a, 0xa2, 0xba, 0x39, 0x45, 0x04, 0x94, 0x7a, 0x2b, 0xed, 0x64, 0x71, 0x8e, 0x50, 0x3c, 0x6f, - 0xc9, 0xeb, 0x1a, 0xae, 0x8a, 0x5b, 
0x01, 0xe4, 0xf9, 0x29, 0x2b, 0x7f, 0x09, 0x93, 0x5f, 0x98, - 0x85, 0x68, 0x70, 0x7e, 0x8d, 0x39, 0xbf, 0x4c, 0x6e, 0xe6, 0xce, 0x15, 0x82, 0x64, 0x27, 0x2a, - 0x34, 0x5f, 0xd5, 0xf0, 0xbc, 0x58, 0x5f, 0x44, 0x87, 0xe7, 0xa7, 0x3c, 0xff, 0x2a, 0x47, 0xa8, - 0x50, 0x53, 0x89, 0xd6, 0x19, 0x42, 0x17, 0xc8, 0x62, 0x31, 0x84, 0x56, 0xee, 0xbc, 0x7d, 0xbf, - 0x86, 0xde, 0xb9, 0x5f, 0x43, 0x7f, 0xb9, 0x5f, 0x43, 0xf7, 0xf6, 0x6b, 0x47, 0xde, 0xd9, 0xaf, - 0x1d, 0xf9, 0xe3, 0x7e, 0xed, 0xc8, 0x0b, 0xab, 0x1d, 0x3b, 0xbc, 0xdb, 0x6f, 0xd5, 0x4d, 0x77, - 0x5b, 0x8f, 0xed, 0x5d, 0x4a, 0x0c, 0xd6, 0x33, 0x06, 0x2f, 0xa5, 0x16, 0x2f, 0xc5, 0x26, 0xeb, - 0xaf, 0x30, 0xbd, 0xe1, 0xc0, 0xb3, 0x82, 0xd6, 0x43, 0xec, 0xbf, 0xc8, 0xae, 0x7e, 0x18, 0x00, - 0x00, 0xff, 0xff, 0x6d, 0x73, 0x7b, 0xfa, 0x53, 0x39, 0x00, 0x00, +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/pki/query.proto", fileDescriptor_db6ffa8ae459ba1c) +} + +var fileDescriptor_db6ffa8ae459ba1c = []byte{ + // 2131 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x5b, 0x4d, 0x8c, 0x23, 0x47, + 0x15, 0xde, 0x6a, 0xb3, 0x41, 0x14, 0x08, 0x91, 0xda, 0xcd, 0xee, 0x60, 0x65, 0x3c, 0xbb, 0x95, + 0xcd, 0xec, 0x4f, 0x32, 0xee, 0x4c, 0x46, 0xbb, 0xcb, 0x8f, 0xc2, 0x6a, 0x7e, 0x60, 0x32, 0x8b, + 0x98, 0xcc, 0x78, 0xc2, 0x86, 0xac, 0xc2, 0x5a, 0xed, 0x76, 0xc7, 0xdb, 0xb8, 0xc7, 0xdd, 0xe9, + 0x6e, 0x0f, 0x31, 0xa3, 0x41, 0x02, 0x89, 0x04, 0x71, 0x5a, 0x7e, 0x0e, 0x80, 0x22, 0x22, 0x11, + 0x29, 0xe2, 0xce, 0x81, 0x23, 0xd7, 0x48, 0x1c, 0x88, 0x94, 0x03, 0x48, 0x48, 0x08, 0xed, 0x80, + 0x08, 0x4a, 0x24, 0xc4, 0x05, 0xc4, 0x05, 0xa2, 0xae, 0x7e, 0x9e, 0x6e, 0xbb, 0xab, 0xcb, 0xed, + 0x72, 0x79, 0x72, 0xb3, 0xbb, 0xaa, 0xdf, 0xab, 0xf7, 0xbd, 0xaf, 0xde, 0x7b, 0x55, 0xcf, 0xc6, + 0x9f, 0xfb, 0x96, 0xdd, 0x6a, 0x58, 0x96, 0xe1, 0x38, 0xb6, 0xd1, 0x31, 0x2d, 0xbd, 0x69, 0x07, + 0xa1, 0x6f, 0x37, 0xba, 0xa1, 0xd5, 0x34, 0xdd, 0x5d, 0x2f, 0x7e, 
0xea, 0x58, 0xcd, 0x96, 0xe5, + 0xeb, 0x5e, 0xdb, 0xd6, 0x5f, 0xea, 0x5a, 0x7e, 0xaf, 0xea, 0xf9, 0x6e, 0xe8, 0x92, 0xea, 0xe0, + 0xbb, 0x55, 0xc1, 0xbb, 0x55, 0xaf, 0x6d, 0x97, 0x1f, 0x6e, 0xb9, 0x6e, 0xcb, 0xb1, 0x74, 0xc3, + 0xb3, 0x75, 0xa3, 0xd3, 0x71, 0x43, 0x23, 0xb4, 0xdd, 0x4e, 0x10, 0x4b, 0x2b, 0x5f, 0x31, 0xdd, + 0x60, 0xd7, 0x0d, 0xf4, 0x86, 0x11, 0x58, 0xb1, 0x1a, 0x7d, 0x6f, 0xb1, 0x61, 0x85, 0xc6, 0xa2, + 0xee, 0x19, 0x2d, 0xbb, 0xc3, 0x26, 0xc3, 0xdc, 0x9b, 0x63, 0xae, 0xda, 0xf0, 0x3c, 0xdf, 0xdd, + 0xb3, 0x9a, 0x75, 0xd3, 0xf2, 0x43, 0xfb, 0x45, 0xdb, 0x34, 0x42, 0xab, 0xaf, 0x77, 0x63, 0x4c, + 0x59, 0x9e, 0xef, 0x7a, 0x6e, 0x30, 0x28, 0x0b, 0x44, 0xad, 0x8f, 0x29, 0xca, 0xbc, 0x6b, 0x3b, + 0xdc, 0x35, 0x3d, 0xab, 0x60, 0x4d, 0x75, 0xdf, 0xda, 0x73, 0xcd, 0x34, 0x6a, 0xe3, 0x5a, 0x1a, + 0x09, 0x68, 0xf3, 0x41, 0x7b, 0x46, 0xd6, 0x01, 0xbe, 0xeb, 0x86, 0x3c, 0x81, 0x9b, 0x92, 0x6b, + 0xcb, 0x93, 0xf7, 0x55, 0x15, 0x0c, 0xa9, 0x37, 0x7a, 0xf5, 0xa0, 0xdb, 0xf8, 0x86, 0x65, 0x86, + 0xd2, 0x10, 0x46, 0x2f, 0x73, 0xc9, 0x72, 0x6b, 0x5c, 0x1f, 0xb7, 0xed, 0x94, 0x4b, 0xeb, 0x47, + 0xd3, 0xa3, 0x2f, 0x9e, 0x6b, 0x77, 0xfa, 0x4b, 0xf4, 0x54, 0xcb, 0x65, 0x30, 0xd8, 0x41, 0xd0, + 0xb5, 0xfc, 0x3e, 0x1a, 0xf5, 0xb6, 0xd5, 0xab, 0xdb, 0x4d, 0xc9, 0xdd, 0xd8, 0x71, 0xcd, 0x5c, + 0xbf, 0x6d, 0x48, 0xc8, 0xb2, 0x4d, 0x83, 0x27, 0xaa, 0x26, 0x49, 0x29, 0xd1, 0xf2, 0x6e, 0xab, + 0x30, 0x35, 0xc2, 0x73, 0xcf, 0x6e, 0xd6, 0x8d, 0x4e, 0xb3, 0x1e, 0xb4, 0x8f, 0x60, 0x3c, 0xdd, + 0x72, 0x5b, 0x2e, 0xfb, 0xa8, 0x47, 0x9f, 0xe0, 0xe9, 0xa7, 0xe3, 0xb0, 0x58, 0x8f, 0x07, 0xe2, + 0x2f, 0xf1, 0x10, 0x35, 0xf1, 0x23, 0xdb, 0x51, 0x9c, 0x5c, 0xb7, 0xc2, 0x65, 0xa0, 0xef, 0x6a, + 0x4a, 0x4d, 0xcd, 0x7a, 0xa9, 0x6b, 0x05, 0x21, 0x99, 0xc1, 0x1f, 0x05, 0xb7, 0xcd, 0xa0, 0x73, + 0xe8, 0xd2, 0xc7, 0x6a, 0xfd, 0xaf, 0x84, 0xe2, 0x4f, 0xc0, 0xc7, 0x2f, 0x5b, 0xbd, 0x8d, 0xe6, + 0x8c, 0xc6, 0x86, 0x07, 0x9e, 0xd1, 0x37, 0x11, 0xbe, 0x20, 0xd6, 0x12, 0x78, 0x6e, 0x27, 0xb0, + 0xc8, 
0xb7, 0xf1, 0x69, 0x83, 0x33, 0xce, 0x74, 0x7e, 0xfc, 0xc9, 0xb5, 0x31, 0x93, 0x45, 0x95, + 0xa7, 0x6b, 0xe5, 0x23, 0x6f, 0xfd, 0x79, 0xee, 0x44, 0x8d, 0xab, 0x87, 0xfe, 0x10, 0x01, 0x1c, + 0xcb, 0x8e, 0x23, 0x82, 0xe3, 0x4b, 0x18, 0x27, 0xf9, 0x04, 0x56, 0x37, 0x5f, 0x05, 0x60, 0xa3, + 0xe4, 0x53, 0x8d, 0x73, 0x1c, 0x24, 0x9f, 0xea, 0x96, 0xd1, 0xb2, 0xe0, 0xdd, 0x5a, 0xea, 0xcd, + 0x42, 0xe0, 0xfd, 0xb3, 0x0f, 0x5e, 0xee, 0x9a, 0x46, 0x82, 0x57, 0x3a, 0x0e, 0xf0, 0xc8, 0xfa, + 0x00, 0x28, 0x1a, 0x03, 0xe5, 0xe2, 0x48, 0x50, 0xe2, 0xc5, 0xa7, 0x51, 0xa1, 0x0d, 0x4c, 0xfb, + 0x6c, 0xd9, 0x82, 0xa4, 0x94, 0x52, 0xa4, 0x86, 0x92, 0xbf, 0x44, 0x09, 0xf1, 0xb9, 0x4a, 0x00, + 0xd4, 0x7d, 0x7c, 0xca, 0xcb, 0x0e, 0x83, 0xcb, 0x57, 0xc7, 0xc5, 0x94, 0xa3, 0x09, 0x20, 0xe5, + 0x69, 0xa1, 0x0e, 0x00, 0xb1, 0xec, 0x38, 0x02, 0x20, 0x14, 0x91, 0x91, 0xbe, 0x97, 0x22, 0xbf, + 0x14, 0x24, 0xa5, 0xe9, 0x43, 0xa2, 0x92, 0x64, 0xe7, 0xfa, 0xfe, 0x5f, 0x8d, 0x4a, 0x28, 0xde, + 0x36, 0x3f, 0x83, 0x1f, 0x88, 0x73, 0x16, 0x30, 0x0c, 0xbe, 0x91, 0x79, 0xfc, 0x49, 0xa3, 0x1b, + 0xde, 0x75, 0x7d, 0x3b, 0xec, 0xa5, 0x29, 0x36, 0xf4, 0x94, 0xfe, 0x1c, 0xe1, 0xf3, 0x02, 0x25, + 0x80, 0x67, 0x17, 0x3f, 0x68, 0x0e, 0x0f, 0x82, 0x1b, 0x97, 0xc7, 0x45, 0x33, 0xa3, 0x05, 0xb0, + 0xcc, 0x6a, 0xa0, 0xf7, 0x10, 0x7e, 0x5c, 0xb8, 0x03, 0xfa, 0xe9, 0x5c, 0xc9, 0x86, 0x63, 0x73, + 0x2c, 0xdf, 0x36, 0x9c, 0xcd, 0xee, 0x6e, 0xc3, 0xf2, 0x67, 0x4a, 0x30, 0x27, 0xf5, 0x8c, 0xfe, + 0x0e, 0xe1, 0x85, 0x82, 0x4b, 0x02, 0xec, 0x7e, 0x86, 0xf0, 0xac, 0x27, 0x9a, 0x09, 0x40, 0x7e, + 0x45, 0x01, 0x2d, 0x13, 0xa1, 0x00, 0xaa, 0x58, 0x33, 0xdd, 0x03, 0x7c, 0xf3, 0xb6, 0xd3, 0x30, + 0xbe, 0xaa, 0xf6, 0xf1, 0x4f, 0x34, 0x40, 0x71, 0xb4, 0xe2, 0x31, 0x50, 0x2c, 0x7d, 0x38, 0x28, + 0x4e, 0x25, 0xab, 0xd4, 0xe2, 0x2a, 0x4d, 0x7d, 0xa1, 0x93, 0xce, 0x2a, 0x5c, 0x25, 0x49, 0x08, + 0xf5, 0xb3, 0xc3, 0xb2, 0x59, 0x85, 0xa3, 0xa9, 0x1f, 0x42, 0x39, 0x5a, 0xd2, 0x59, 0x45, 0x00, + 0xc4, 0x34, 0xb2, 0x8a, 0x14, 0x24, 0xa5, 
0xe9, 0x43, 0xa2, 0x8e, 0x64, 0x97, 0xf1, 0xc5, 0xe1, + 0x42, 0xb7, 0xe6, 0xba, 0x21, 0x07, 0x60, 0xfa, 0x1b, 0x84, 0x2f, 0x8d, 0x9e, 0x0b, 0xe8, 0xfc, + 0x00, 0xe1, 0x19, 0x23, 0x67, 0x12, 0x38, 0xe7, 0x69, 0xd9, 0x02, 0x6f, 0x58, 0x1e, 0x00, 0x95, + 0xab, 0x8f, 0x5e, 0xc2, 0xf3, 0x43, 0x24, 0xcf, 0xb3, 0xf1, 0xd7, 0x28, 0xc1, 0x23, 0x77, 0x2a, + 0x98, 0xf8, 0x2a, 0xc2, 0x67, 0x7d, 0xfe, 0x1c, 0xb0, 0x70, 0x5d, 0x92, 0x05, 0x39, 0x06, 0xe6, + 0x69, 0xa3, 0x4f, 0x27, 0x89, 0x91, 0x5b, 0x04, 0xf7, 0x76, 0xe2, 0x3d, 0x3f, 0x32, 0x66, 0x0c, + 0x24, 0xb4, 0x11, 0xa2, 0x52, 0xa1, 0xd8, 0x10, 0xcd, 0x94, 0x4d, 0x68, 0x42, 0xf5, 0xfd, 0x50, + 0x2c, 0xd4, 0x9c, 0x3e, 0x2b, 0xd6, 0xe0, 0x4e, 0x42, 0x7d, 0x08, 0x7d, 0x23, 0x75, 0x56, 0xe4, + 0x6b, 0x49, 0x07, 0x8c, 0xcc, 0xb8, 0x7c, 0x0c, 0xcd, 0x88, 0x4a, 0x02, 0x46, 0x66, 0x88, 0xee, + 0xa6, 0x83, 0x5a, 0x3e, 0x14, 0xaa, 0x82, 0xe8, 0xfb, 0xa9, 0x33, 0xa0, 0x1c, 0x28, 0xa5, 0xe9, + 0x83, 0xa2, 0x2e, 0x8a, 0xbe, 0x9a, 0xae, 0x03, 0xdb, 0x76, 0x52, 0x0d, 0xac, 0xa5, 0x2e, 0x96, + 0xb6, 0x5c, 0xbb, 0x73, 0xb4, 0x05, 0x3f, 0x85, 0x4b, 0x7b, 0x76, 0x93, 0x21, 0x7c, 0xb2, 0x16, + 0x7d, 0x24, 0xa7, 0xf1, 0x49, 0xc7, 0x68, 0x58, 0x0e, 0x90, 0x2c, 0xfe, 0x42, 0xaa, 0x98, 0xc4, + 0x35, 0xfc, 0x4e, 0xc2, 0xb9, 0x35, 0xa8, 0x45, 0x39, 0x23, 0xf4, 0xf7, 0x08, 0x57, 0x8b, 0xae, + 0x04, 0x5c, 0xf0, 0x1a, 0xc2, 0x15, 0xf1, 0x54, 0x20, 0xc2, 0xe6, 0xd8, 0xd5, 0x94, 0x50, 0x2a, + 0x78, 0x66, 0x84, 0x6e, 0xfa, 0xcd, 0x54, 0x71, 0x58, 0x08, 0x5a, 0x55, 0x1c, 0xfe, 0xa9, 0x06, + 0x50, 0x16, 0xd0, 0x3c, 0x0e, 0x94, 0xa5, 0x0f, 0x0b, 0x4a, 0x75, 0x7c, 0x0f, 0xf1, 0x4a, 0x31, + 0x92, 0x05, 0x2b, 0xbd, 0x8d, 0x0c, 0x49, 0xfb, 0x8e, 0xe2, 0x73, 0x1b, 0xe5, 0x72, 0xfb, 0x0d, + 0x0d, 0xaf, 0x4e, 0xa4, 0x16, 0xbc, 0xf4, 0x27, 0x84, 0x97, 0xbc, 0xf1, 0xdf, 0x07, 0x2a, 0x99, + 0x6a, 0x5d, 0xc7, 0x55, 0x05, 0xfe, 0x94, 0x59, 0x25, 0xbd, 0x96, 0x1c, 0x1b, 0x36, 0x5d, 0x33, + 0xa7, 0xd0, 0xc9, 0xc6, 0x9f, 0x81, 0xa3, 0x00, 0xf7, 0xc5, 0x24, 0x62, 0x77, 
0xb2, 0xc3, 0xb2, + 0x69, 0x8c, 0xa3, 0xa9, 0x1f, 0xb1, 0x39, 0x5a, 0xd2, 0x47, 0x01, 0x81, 0x71, 0xd3, 0x38, 0x0a, + 0x48, 0x41, 0x52, 0x9a, 0x3e, 0x24, 0xea, 0x36, 0xf5, 0xd5, 0xe4, 0xee, 0x67, 0xd3, 0x35, 0x37, + 0x4c, 0xa3, 0x18, 0x6f, 0x7e, 0x81, 0x06, 0x08, 0x97, 0x79, 0x0f, 0x30, 0x7a, 0x19, 0x93, 0x4e, + 0x66, 0x14, 0x7c, 0xb3, 0x22, 0x01, 0xd1, 0x90, 0x24, 0x40, 0x88, 0xa3, 0x83, 0xb6, 0xc1, 0xae, + 0xd8, 0x89, 0x39, 0x76, 0xa9, 0xa2, 0xcc, 0xdf, 0xd1, 0x00, 0x43, 0xc7, 0x45, 0xa3, 0x34, 0x6d, + 0x34, 0xd4, 0xd1, 0xc5, 0xc6, 0x97, 0x87, 0x4e, 0x4a, 0x82, 0x1d, 0x39, 0x59, 0x89, 0xfd, 0x5b, + 0x84, 0xaf, 0x14, 0xd1, 0x05, 0xe0, 0xde, 0x43, 0xb8, 0xec, 0xe7, 0x4e, 0x03, 0xe7, 0xde, 0x94, + 0x3c, 0x9b, 0xe5, 0xef, 0x4e, 0x81, 0x4e, 0x1a, 0x00, 0x58, 0xc9, 0x9d, 0xc2, 0x31, 0x84, 0xaf, + 0xef, 0x6b, 0x00, 0xdb, 0x08, 0xad, 0x45, 0x61, 0x2b, 0x1d, 0x37, 0x6c, 0xea, 0xc8, 0xfa, 0x62, + 0x52, 0x15, 0xf3, 0x56, 0xd2, 0xbb, 0x65, 0x37, 0x97, 0x3b, 0xcd, 0x9d, 0xb6, 0xdd, 0xcc, 0x2f, + 0xd0, 0x8b, 0x30, 0xf5, 0x1d, 0x84, 0xf5, 0xc2, 0x8a, 0x00, 0xf7, 0xd7, 0x11, 0x9e, 0xeb, 0x88, + 0xe7, 0x02, 0x09, 0x9e, 0x51, 0x91, 0x4a, 0x52, 0x62, 0xc1, 0x03, 0xa3, 0xb4, 0x3f, 0xf9, 0xee, + 0x13, 0xf8, 0x24, 0xb3, 0x8a, 0xfc, 0x17, 0xe1, 0xd3, 0xbc, 0x83, 0x39, 0xd9, 0x19, 0x77, 0x69, + 0x05, 0x9a, 0xb8, 0xe5, 0x67, 0xd5, 0x0a, 0x8d, 0xf1, 0xa6, 0xd7, 0xbe, 0xfb, 0xce, 0x5f, 0x7f, + 0xac, 0x3d, 0x41, 0xaa, 0x7a, 0xd3, 0x74, 0xe2, 0x9f, 0xa4, 0xa4, 0xa6, 0xe9, 0xfb, 0xe0, 0xd0, + 0x83, 0xa3, 0x4f, 0xcc, 0xb5, 0x07, 0xe4, 0x10, 0xe1, 0xb3, 0x3c, 0xc1, 0xcb, 0x8e, 0x23, 0x69, + 0xbe, 0xb8, 0x69, 0x2b, 0x69, 0xfe, 0x88, 0xae, 0x2b, 0x9d, 0x65, 0xe6, 0x9f, 0x25, 0x0f, 0x71, + 0xcd, 0x27, 0xff, 0x47, 0xf8, 0x14, 0xe7, 0x16, 0x9c, 0xd4, 0x64, 0x7d, 0x91, 0xdf, 0x08, 0x2c, + 0xef, 0x28, 0x95, 0x09, 0xf6, 0xdd, 0x60, 0xf6, 0x7d, 0x96, 0x5c, 0x3f, 0xb2, 0xaf, 0x7f, 0x5f, + 0xbf, 0x50, 0xcc, 0xcf, 0xff, 0x40, 0xf8, 0x0c, 0x47, 0x41, 0xe4, 0xe6, 0x9a, 0xac, 0x47, 0x94, + 0x83, 0x20, 0x6e, 
0x79, 0xd2, 0x79, 0x06, 0xc2, 0x39, 0x52, 0x11, 0x83, 0x40, 0xfe, 0x83, 0xf0, + 0x83, 0x99, 0x16, 0x1c, 0xd9, 0x92, 0xf5, 0x4b, 0x5e, 0x63, 0xb2, 0xbc, 0xad, 0x50, 0x22, 0x98, + 0xf8, 0x14, 0x33, 0xf1, 0x3a, 0xb9, 0x9a, 0xf0, 0x38, 0x9a, 0x3b, 0xe4, 0xe4, 0xf8, 0x18, 0x79, + 0xa0, 0xef, 0x0f, 0x76, 0x3a, 0x0f, 0xc8, 0xaf, 0x34, 0x3c, 0x2b, 0xec, 0xf6, 0x90, 0x17, 0x94, + 0xb2, 0x73, 0xa8, 0x79, 0x56, 0xfe, 0xfa, 0x94, 0xa4, 0x03, 0x3a, 0x37, 0x19, 0x3a, 0x6b, 0x64, + 0x25, 0x4b, 0x80, 0xe4, 0x77, 0x4f, 0x05, 0x37, 0xc4, 0x8f, 0x34, 0x7c, 0x4e, 0xa8, 0x35, 0xda, + 0x1a, 0x2f, 0x28, 0xa5, 0xb1, 0x1a, 0xb4, 0x8a, 0xf6, 0x13, 0xe9, 0x22, 0x43, 0xeb, 0x31, 0x72, + 0xb9, 0x30, 0x5a, 0xe4, 0x7f, 0x08, 0x9f, 0xe2, 0x34, 0x6d, 0xe4, 0xe3, 0x64, 0x7e, 0x6b, 0x4b, + 0x3e, 0x4e, 0x0a, 0xfa, 0x57, 0xf4, 0x0b, 0xcc, 0xe6, 0xcf, 0x90, 0x6b, 0x47, 0x36, 0x43, 0x21, + 0x56, 0x90, 0x15, 0xef, 0x22, 0x7c, 0x86, 0x23, 0x7f, 0xa2, 0x30, 0xa9, 0x1c, 0x03, 0x71, 0x0f, + 0x8f, 0x3e, 0xca, 0x30, 0x98, 0x23, 0xb3, 0x42, 0x0c, 0xc8, 0xbf, 0x10, 0x9e, 0xc9, 0x6b, 0x3e, + 0x91, 0xe7, 0x26, 0x2d, 0x52, 0x72, 0x8e, 0x01, 0xe5, 0xaf, 0xa9, 0x17, 0x0c, 0x66, 0x53, 0x66, + 0xf6, 0xc3, 0xa4, 0x9c, 0x98, 0xed, 0xba, 0xe1, 0xa0, 0xcd, 0xff, 0x46, 0xf8, 0x6c, 0x4e, 0x3b, + 0x8a, 0xdc, 0x9a, 0x90, 0x8f, 0x79, 0x16, 0x3f, 0xa7, 0x5c, 0x2e, 0x18, 0x7c, 0x85, 0x19, 0x7c, + 0x81, 0xd0, 0x8c, 0x9f, 0xb3, 0x86, 0x7f, 0x4f, 0xc3, 0xb3, 0xc2, 0xde, 0x93, 0x7c, 0x62, 0x28, + 0xd2, 0x9c, 0x93, 0x4f, 0x0c, 0x85, 0xfa, 0x75, 0xf4, 0x22, 0x83, 0xe2, 0x3c, 0x99, 0x1b, 0x51, + 0xfd, 0x92, 0xef, 0x68, 0x51, 0x80, 0xcb, 0x36, 0x4d, 0x26, 0x08, 0x46, 0xb9, 0x7d, 0x27, 0xf9, + 0x4a, 0x5f, 0xd4, 0x5c, 0xe2, 0x94, 0x82, 0xfd, 0x2e, 0x50, 0xc1, 0x18, 0xf7, 0x1e, 0x8b, 0x71, + 0x19, 0x0d, 0x13, 0x55, 0xfc, 0xea, 0x61, 0x18, 0xd1, 0x63, 0xe3, 0x14, 0x83, 0x5c, 0x18, 0xc8, + 0x9b, 0xda, 0xa8, 0xee, 0x05, 0x91, 0xaf, 0x5a, 0x8a, 0xb4, 0x6e, 0xca, 0x77, 0xa6, 0x25, 0x1e, + 0x90, 0xf8, 0x22, 0x43, 0xe2, 0x06, 0x79, 0x6a, 0x20, 
0x0e, 0x40, 0x7a, 0x8f, 0x7f, 0xf7, 0xdd, + 0x2f, 0x19, 0xd3, 0x77, 0xea, 0x07, 0xfa, 0xfe, 0x9e, 0xdd, 0x3c, 0xd0, 0xf7, 0x59, 0x4f, 0xee, + 0x80, 0xbc, 0xa2, 0xe1, 0xf3, 0x62, 0x8d, 0x11, 0x43, 0xe4, 0x6b, 0x96, 0x29, 0x62, 0x55, 0xb8, + 0x97, 0xc5, 0x4b, 0x12, 0xc3, 0x58, 0x91, 0x3f, 0x68, 0x78, 0x49, 0xa2, 0xbd, 0x41, 0xfc, 0xe9, + 0xf8, 0x59, 0xd4, 0x6d, 0x2a, 0x07, 0xc7, 0xaa, 0x13, 0x40, 0xbc, 0xce, 0x40, 0x5c, 0x24, 0xfa, + 0x98, 0x84, 0x23, 0xef, 0x23, 0x7c, 0x8a, 0x77, 0x23, 0x26, 0x5d, 0x5e, 0xe6, 0xdf, 0x37, 0xca, + 0x97, 0x97, 0x82, 0xdb, 0x44, 0xfa, 0x38, 0xb3, 0x7c, 0x9e, 0x5c, 0x38, 0xb2, 0xbc, 0xe3, 0x9a, + 0xd9, 0x74, 0x1b, 0xef, 0x2c, 0xf2, 0xba, 0x86, 0xe7, 0x46, 0x5c, 0x56, 0x91, 0x3b, 0x0a, 0x97, + 0xc9, 0xb9, 0xf1, 0x2b, 0xd7, 0xa7, 0x26, 0x1f, 0x20, 0xf9, 0x3c, 0x83, 0xe4, 0x2a, 0x59, 0x2a, + 0x02, 0x09, 0xef, 0x56, 0x82, 0xa3, 0x68, 0xa2, 0x72, 0x5b, 0x39, 0x27, 0xc4, 0x7d, 0x32, 0x4e, + 0x22, 0xe2, 0x02, 0x10, 0x1d, 0x2d, 0x48, 0xb6, 0xa9, 0x41, 0xb6, 0x27, 0x70, 0x10, 0xbf, 0xed, + 0x53, 0xae, 0xa9, 0x14, 0x09, 0x56, 0x3e, 0xc6, 0xac, 0x7c, 0x94, 0x3c, 0x32, 0x60, 0xa5, 0x6d, + 0x1a, 0x3c, 0xe2, 0xff, 0x0d, 0xe1, 0x87, 0xb2, 0xb2, 0x22, 0xaf, 0x6e, 0x4f, 0xe0, 0x01, 0xa5, + 0xd6, 0x0a, 0x3b, 0x59, 0x9c, 0x23, 0x14, 0xcf, 0x5a, 0xf2, 0x9a, 0x86, 0xcb, 0xf9, 0xad, 0x00, + 0xf2, 0xfc, 0x84, 0x95, 0xbf, 0x80, 0xc9, 0xb7, 0xa7, 0x21, 0x1a, 0x8c, 0x5f, 0x67, 0xc6, 0x2f, + 0x93, 0x1b, 0x99, 0x73, 0x45, 0xce, 0xce, 0xce, 0x2b, 0x34, 0x5f, 0xd1, 0xf0, 0x6c, 0xbe, 0xbe, + 0x88, 0x0e, 0xcf, 0x4f, 0x78, 0xfe, 0x55, 0x8e, 0x50, 0xa1, 0xa6, 0x12, 0xad, 0x32, 0x84, 0x2e, + 0x91, 0xf9, 0x62, 0x08, 0xad, 0xdc, 0x79, 0xeb, 0x7e, 0x05, 0xbd, 0x7d, 0xbf, 0x82, 0xfe, 0x72, + 0xbf, 0x82, 0xee, 0x1d, 0x56, 0x4e, 0xbc, 0x7d, 0x58, 0x39, 0xf1, 0xc7, 0xc3, 0xca, 0x89, 0xdb, + 0x6b, 0x2d, 0x3b, 0xbc, 0xdb, 0x6d, 0x54, 0x4d, 0x77, 0x57, 0x8f, 0xd7, 0xbb, 0xc0, 0xfb, 0x47, + 0xda, 0x42, 0xb2, 0xe2, 0x05, 0xf8, 0x4f, 0xda, 0xcb, 0x4c, 0x6f, 0xd8, 0xf3, 0xac, 0xa0, 
0xf1, + 0x00, 0xfb, 0x17, 0xd9, 0xd2, 0x07, 0x01, 0x00, 0x00, 0xff, 0xff, 0x7e, 0x67, 0x34, 0x90, 0x03, + 0x3c, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -3518,7 +3521,7 @@ var _Query_serviceDesc = grpc.ServiceDesc{ }, }, Streams: []grpc.StreamDesc{}, - Metadata: "pki/query.proto", + Metadata: "zigbeealliance/distributedcomplianceledger/pki/query.proto", } func (m *QueryGetApprovedCertificatesRequest) Marshal() (dAtA []byte, err error) { diff --git a/x/pki/types/query.pb.gw.go b/x/pki/types/query.pb.gw.go index c7f2170d6..8f5f0df02 100644 --- a/x/pki/types/query.pb.gw.go +++ b/x/pki/types/query.pb.gw.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. -// source: pki/query.proto +// source: zigbeealliance/distributedcomplianceledger/pki/query.proto /* Package types is a reverse proxy. diff --git a/x/pki/types/rejected_certificate.pb.go b/x/pki/types/rejected_certificate.pb.go index 4386cd466..751adc847 100644 --- a/x/pki/types/rejected_certificate.pb.go +++ b/x/pki/types/rejected_certificate.pb.go @@ -1,11 +1,11 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: pki/rejected_certificate.proto +// source: zigbeealliance/distributedcomplianceledger/pki/rejected_certificate.proto package types import ( fmt "fmt" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -33,7 +33,7 @@ func (m *RejectedCertificate) Reset() { *m = RejectedCertificate{} } func (m *RejectedCertificate) String() string { return proto.CompactTextString(m) } func (*RejectedCertificate) ProtoMessage() {} func (*RejectedCertificate) Descriptor() ([]byte, []int) { - return fileDescriptor_830b292fc8b89847, []int{0} + return fileDescriptor_e80fca8b529705dd, []int{0} } func (m *RejectedCertificate) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -94,28 +94,30 @@ func init() { proto.RegisterType((*RejectedCertificate)(nil), "zigbeealliance.distributedcomplianceledger.pki.RejectedCertificate") } -func init() { proto.RegisterFile("pki/rejected_certificate.proto", fileDescriptor_830b292fc8b89847) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/pki/rejected_certificate.proto", fileDescriptor_e80fca8b529705dd) +} -var fileDescriptor_830b292fc8b89847 = []byte{ - // 274 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x2b, 0xc8, 0xce, 0xd4, - 0x2f, 0x4a, 0xcd, 0x4a, 0x4d, 0x2e, 0x49, 0x4d, 0x89, 0x4f, 0x4e, 0x2d, 0x2a, 0xc9, 0x4c, 0xcb, - 0x4c, 0x4e, 0x2c, 0x49, 0xd5, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xd2, 0xab, 0xca, 0x4c, 0x4f, - 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4b, 0xc9, 0x2c, 0x2e, 0x29, +var fileDescriptor_e80fca8b529705dd = []byte{ + // 276 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xf2, 0xac, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 
0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, - 0xa7, 0x16, 0xe9, 0x15, 0x64, 0x67, 0x4a, 0x89, 0x82, 0xcc, 0xc3, 0x30, 0x46, 0xe9, 0x0c, 0x23, - 0x97, 0x70, 0x10, 0xd4, 0x16, 0x67, 0x84, 0xac, 0x90, 0x04, 0x17, 0x7b, 0x71, 0x69, 0x12, 0x48, - 0x5c, 0x82, 0x51, 0x81, 0x51, 0x83, 0x33, 0x08, 0xc6, 0x15, 0x52, 0xe2, 0xe2, 0x81, 0x32, 0xbd, - 0x53, 0x2b, 0x3d, 0x53, 0x24, 0x98, 0xc0, 0xd2, 0x28, 0x62, 0x42, 0x81, 0x5c, 0xac, 0x20, 0xab, - 0x8a, 0x25, 0x98, 0x15, 0x98, 0x35, 0xb8, 0x8d, 0xac, 0x49, 0x74, 0xac, 0x1e, 0x92, 0x4b, 0x82, - 0x20, 0x26, 0x09, 0xa9, 0x70, 0xf1, 0x16, 0x27, 0x67, 0xa4, 0xe6, 0x26, 0x86, 0xa5, 0x16, 0x15, - 0x67, 0xe6, 0xe7, 0x49, 0xb0, 0x28, 0x30, 0x6a, 0xf0, 0x06, 0xa1, 0x0a, 0x3a, 0xc5, 0x9d, 0x78, - 0x24, 0xc7, 0x78, 0xe1, 0x91, 0x1c, 0xe3, 0x83, 0x47, 0x72, 0x8c, 0x13, 0x1e, 0xcb, 0x31, 0x5c, - 0x78, 0x2c, 0xc7, 0x70, 0xe3, 0xb1, 0x1c, 0x43, 0x94, 0x4b, 0x7a, 0x66, 0x49, 0x46, 0x69, 0x92, - 0x5e, 0x72, 0x7e, 0xae, 0x3e, 0xc4, 0x35, 0xba, 0x30, 0xe7, 0xe8, 0x23, 0x39, 0x47, 0x17, 0xe1, - 0x1e, 0x5d, 0x88, 0x83, 0xf4, 0x2b, 0xf4, 0x41, 0x41, 0x57, 0x52, 0x59, 0x90, 0x5a, 0x9c, 0xc4, - 0x06, 0x0e, 0x35, 0x63, 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, 0xa0, 0x95, 0x77, 0xb5, 0x9e, 0x01, - 0x00, 0x00, + 0xa7, 0x16, 0xe9, 0x17, 0x64, 0x67, 0xea, 0x17, 0xa5, 0x66, 0xa5, 0x26, 0x97, 0xa4, 0xa6, 0xc4, + 0x27, 0xa7, 0x16, 0x95, 0x64, 0xa6, 0x65, 0x26, 0x27, 0x96, 0xa4, 0xea, 0x15, 0x14, 0xe5, 0x97, + 0xe4, 0x0b, 0xe9, 0xa1, 0x1a, 0xa5, 0x87, 0xc7, 0x28, 0xbd, 0x82, 0xec, 0x4c, 0x29, 0x07, 0x12, + 0xad, 0xc6, 0xb0, 0x51, 0xe9, 0x0c, 0x23, 0x97, 0x70, 0x10, 0xd4, 0x41, 0xce, 0x08, 0x59, 0x21, + 0x09, 0x2e, 0xf6, 0xe2, 0xd2, 0x24, 0x90, 0xb8, 0x04, 0xa3, 0x02, 0xa3, 0x06, 0x67, 0x10, 0x8c, + 0x2b, 0xa4, 0xc4, 0xc5, 0x03, 0x65, 0x7a, 0xa7, 0x56, 0x7a, 0xa6, 0x48, 0x30, 0x81, 0xa5, 0x51, + 0xc4, 0x84, 0x02, 0xb9, 0x58, 0x41, 0x56, 0x15, 0x4b, 0x30, 0x2b, 0x30, 0x6b, 0x70, 0x1b, 0x59, + 0x93, 0xe8, 0x2f, 0x3d, 0x24, 0x97, 0x04, 0x41, 0x4c, 0x12, 0x52, 0xe1, 0xe2, 0x2d, 
0x4e, 0xce, + 0x48, 0xcd, 0x4d, 0x0c, 0x4b, 0x2d, 0x2a, 0xce, 0xcc, 0xcf, 0x93, 0x60, 0x51, 0x60, 0xd4, 0xe0, + 0x0d, 0x42, 0x15, 0x74, 0x8a, 0x3b, 0xf1, 0x48, 0x8e, 0xf1, 0xc2, 0x23, 0x39, 0xc6, 0x07, 0x8f, + 0xe4, 0x18, 0x27, 0x3c, 0x96, 0x63, 0xb8, 0xf0, 0x58, 0x8e, 0xe1, 0xc6, 0x63, 0x39, 0x86, 0x28, + 0x97, 0xf4, 0xcc, 0x92, 0x8c, 0xd2, 0x24, 0xbd, 0xe4, 0xfc, 0x5c, 0x7d, 0x88, 0x6b, 0x74, 0xb1, + 0x05, 0x9b, 0x2e, 0xc2, 0x3d, 0xba, 0xd0, 0x80, 0xab, 0x00, 0x07, 0x5d, 0x49, 0x65, 0x41, 0x6a, + 0x71, 0x12, 0x1b, 0x38, 0xd4, 0x8c, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0x1e, 0xe4, 0x48, 0x84, + 0xf4, 0x01, 0x00, 0x00, } func (m *RejectedCertificate) Marshal() (dAtA []byte, err error) { diff --git a/x/pki/types/revoked_certificates.pb.go b/x/pki/types/revoked_certificates.pb.go index bd62df050..e182332e9 100644 --- a/x/pki/types/revoked_certificates.pb.go +++ b/x/pki/types/revoked_certificates.pb.go @@ -1,11 +1,11 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: pki/revoked_certificates.proto +// source: zigbeealliance/distributedcomplianceledger/pki/revoked_certificates.proto package types import ( fmt "fmt" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -33,7 +33,7 @@ func (m *RevokedCertificates) Reset() { *m = RevokedCertificates{} } func (m *RevokedCertificates) String() string { return proto.CompactTextString(m) } func (*RevokedCertificates) ProtoMessage() {} func (*RevokedCertificates) Descriptor() ([]byte, []int) { - return fileDescriptor_cca42ddac598ffcf, []int{0} + return fileDescriptor_47bf09de83e16ea0, []int{0} } func (m *RevokedCertificates) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -94,28 +94,30 @@ func init() { proto.RegisterType((*RevokedCertificates)(nil), "zigbeealliance.distributedcomplianceledger.pki.RevokedCertificates") } -func init() { proto.RegisterFile("pki/revoked_certificates.proto", fileDescriptor_cca42ddac598ffcf) } +func 
init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/pki/revoked_certificates.proto", fileDescriptor_47bf09de83e16ea0) +} -var fileDescriptor_cca42ddac598ffcf = []byte{ - // 278 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x90, 0xbd, 0x4e, 0xc3, 0x30, - 0x14, 0x85, 0x63, 0xca, 0x8f, 0x30, 0x74, 0x09, 0x42, 0x8a, 0x18, 0xac, 0xa8, 0x62, 0xc8, 0x12, - 0x47, 0x82, 0x91, 0x0d, 0x58, 0x10, 0x13, 0x19, 0x18, 0x18, 0x40, 0x89, 0x73, 0x49, 0x2f, 0x49, - 0xea, 0xc8, 0x76, 0x10, 0xe5, 0x29, 0x78, 0x2c, 0x06, 0x86, 0x8e, 0x8c, 0x28, 0x79, 0x11, 0x94, - 0x9a, 0x8a, 0x74, 0xec, 0x66, 0x9f, 0x2b, 0x7d, 0xe7, 0xd3, 0xa1, 0xac, 0x2e, 0x30, 0x52, 0xf0, - 0x2a, 0x0b, 0xc8, 0x9e, 0x04, 0x28, 0x83, 0xcf, 0x28, 0x12, 0x03, 0x9a, 0xd7, 0x4a, 0x1a, 0xe9, - 0xf2, 0x77, 0xcc, 0x53, 0x80, 0xa4, 0x2c, 0x31, 0x99, 0x09, 0xe0, 0x19, 0x6a, 0xa3, 0x30, 0x6d, - 0x0c, 0x64, 0x42, 0x56, 0xb5, 0x4d, 0x4b, 0xc8, 0x72, 0x50, 0xbc, 0x2e, 0xf0, 0xe4, 0xb8, 0xe7, - 0x0d, 0x38, 0x16, 0x33, 0xf9, 0x22, 0xf4, 0x28, 0xb6, 0x2d, 0x57, 0x83, 0x12, 0xd7, 0xa3, 0x7b, - 0xba, 0x49, 0x5f, 0x40, 0x18, 0x8f, 0xf8, 0x24, 0xd8, 0x8f, 0x57, 0x5f, 0x77, 0x42, 0x0f, 0xff, - 0x9e, 0xb7, 0x30, 0xbf, 0xc9, 0xbc, 0xad, 0xe5, 0x79, 0x2d, 0x73, 0xef, 0xe8, 0x4e, 0x5f, 0xa5, - 0xbd, 0x91, 0x3f, 0x0a, 0x0e, 0xce, 0x2e, 0x36, 0x94, 0xe5, 0x03, 0x95, 0xd8, 0x92, 0xdc, 0x53, - 0x3a, 0xd6, 0x62, 0x0a, 0x55, 0x72, 0x0f, 0x4a, 0xa3, 0x9c, 0x79, 0xdb, 0x3e, 0x09, 0xc6, 0xf1, - 0x7a, 0x78, 0xf9, 0xf8, 0xd9, 0x32, 0xb2, 0x68, 0x19, 0xf9, 0x69, 0x19, 0xf9, 0xe8, 0x98, 0xb3, - 0xe8, 0x98, 0xf3, 0xdd, 0x31, 0xe7, 0xe1, 0x3a, 0x47, 0x33, 0x6d, 0x52, 0x2e, 0x64, 0x15, 0x59, - 0x9b, 0x70, 0xa5, 0x13, 0x0d, 0x74, 0xc2, 0x7f, 0x9f, 0xd0, 0x0a, 0x45, 0x6f, 0x51, 0x3f, 0x9d, - 0x99, 0xd7, 0xa0, 0xd3, 0xdd, 0xe5, 0x6a, 0xe7, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x32, 0x4a, - 0x49, 0x62, 0x9e, 0x01, 0x00, 0x00, +var fileDescriptor_47bf09de83e16ea0 = []byte{ + // 281 
bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xf2, 0xac, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0x17, 0x64, 0x67, 0xea, 0x17, 0xa5, 0x96, 0xe5, 0x67, 0xa7, 0xa6, 0xc4, 0x27, + 0xa7, 0x16, 0x95, 0x64, 0xa6, 0x65, 0x26, 0x27, 0x96, 0xa4, 0x16, 0xeb, 0x15, 0x14, 0xe5, 0x97, + 0xe4, 0x0b, 0xe9, 0xa1, 0x1a, 0xa5, 0x87, 0xc7, 0x28, 0xbd, 0x82, 0xec, 0x4c, 0x29, 0x07, 0x12, + 0xad, 0x46, 0xb2, 0x12, 0x62, 0xa3, 0xd2, 0x19, 0x46, 0x2e, 0xe1, 0x20, 0x88, 0x83, 0x9c, 0x91, + 0xdc, 0x23, 0x24, 0xc1, 0xc5, 0x5e, 0x5c, 0x9a, 0x94, 0x95, 0x9a, 0x5c, 0x22, 0xc1, 0xa8, 0xc0, + 0xa8, 0xc1, 0x19, 0x04, 0xe3, 0x0a, 0x29, 0x71, 0xf1, 0x40, 0x99, 0xde, 0xa9, 0x95, 0x9e, 0x29, + 0x12, 0x4c, 0x60, 0x69, 0x14, 0x31, 0xa1, 0x40, 0x2e, 0x56, 0x90, 0x55, 0xc5, 0x12, 0xcc, 0x0a, + 0xcc, 0x1a, 0xdc, 0x46, 0xd6, 0x24, 0xfa, 0x4b, 0x0f, 0xc9, 0x29, 0x41, 0x10, 0x93, 0x84, 0x54, + 0xb8, 0x78, 0x8b, 0x93, 0x33, 0x52, 0x73, 0x13, 0xc3, 0x52, 0x8b, 0x8a, 0x33, 0xf3, 0xf3, 0x24, + 0x58, 0x14, 0x18, 0x35, 0x78, 0x83, 0x50, 0x05, 0x9d, 0xe2, 0x4e, 0x3c, 0x92, 0x63, 0xbc, 0xf0, + 0x48, 0x8e, 0xf1, 0xc1, 0x23, 0x39, 0xc6, 0x09, 0x8f, 0xe5, 0x18, 0x2e, 0x3c, 0x96, 0x63, 0xb8, + 0xf1, 0x58, 0x8e, 0x21, 0xca, 0x25, 0x3d, 0xb3, 0x24, 0xa3, 0x34, 0x49, 0x2f, 0x39, 0x3f, 0x57, + 0x1f, 0xe2, 0x1a, 0x5d, 0x6c, 0xc1, 0xa6, 0x8b, 0x70, 0x8f, 0x2e, 0x34, 0xe0, 0x2a, 0xc0, 0x41, + 0x57, 0x52, 0x59, 0x90, 0x5a, 0x9c, 0xc4, 0x06, 0x0e, 0x35, 0x63, 0x40, 0x00, 0x00, 0x00, 0xff, + 0xff, 0x27, 0x95, 0x6b, 0x99, 0xf4, 0x01, 0x00, 0x00, } func (m *RevokedCertificates) Marshal() (dAtA []byte, err error) { diff --git a/x/pki/types/revoked_noc_root_certificates.pb.go b/x/pki/types/revoked_noc_root_certificates.pb.go index 6e061beb3..b049fa6af 100644 --- 
a/x/pki/types/revoked_noc_root_certificates.pb.go +++ b/x/pki/types/revoked_noc_root_certificates.pb.go @@ -1,11 +1,11 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: pki/revoked_noc_root_certificates.proto +// source: zigbeealliance/distributedcomplianceledger/pki/revoked_noc_root_certificates.proto package types import ( fmt "fmt" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -32,7 +32,7 @@ func (m *RevokedNocRootCertificates) Reset() { *m = RevokedNocRootCertif func (m *RevokedNocRootCertificates) String() string { return proto.CompactTextString(m) } func (*RevokedNocRootCertificates) ProtoMessage() {} func (*RevokedNocRootCertificates) Descriptor() ([]byte, []int) { - return fileDescriptor_ab9c4f154f441df5, []int{0} + return fileDescriptor_05cdc636552868bf, []int{0} } func (m *RevokedNocRootCertificates) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -87,28 +87,28 @@ func init() { } func init() { - proto.RegisterFile("pki/revoked_noc_root_certificates.proto", fileDescriptor_ab9c4f154f441df5) + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/pki/revoked_noc_root_certificates.proto", fileDescriptor_05cdc636552868bf) } -var fileDescriptor_ab9c4f154f441df5 = []byte{ - // 268 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x2f, 0xc8, 0xce, 0xd4, - 0x2f, 0x4a, 0x2d, 0xcb, 0xcf, 0x4e, 0x4d, 0x89, 0xcf, 0xcb, 0x4f, 0x8e, 0x2f, 0xca, 0xcf, 0x2f, - 0x89, 0x4f, 0x4e, 0x2d, 0x2a, 0xc9, 0x4c, 0xcb, 0x4c, 0x4e, 0x2c, 0x49, 0x2d, 0xd6, 0x2b, 0x28, - 0xca, 0x2f, 0xc9, 0x17, 0xd2, 0xab, 0xca, 0x4c, 0x4f, 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, - 0xcc, 0x4b, 0x4e, 0xd5, 0x4b, 0xc9, 0x2c, 0x2e, 0x29, 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, - 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, 0xa7, 0x16, 0xe9, 0x15, 0x64, 0x67, 0x4a, - 0x89, 0x82, 0x0c, 0x46, 0x32, 0x07, 0x62, 0x8c, 
0xd2, 0x5a, 0x46, 0x2e, 0xa9, 0x20, 0x88, 0x75, - 0x7e, 0xf9, 0xc9, 0x41, 0xf9, 0xf9, 0x25, 0xce, 0x48, 0x76, 0x09, 0x49, 0x70, 0xb1, 0x17, 0x97, - 0x26, 0x65, 0xa5, 0x26, 0x97, 0x48, 0x30, 0x2a, 0x30, 0x6a, 0x70, 0x06, 0xc1, 0xb8, 0x42, 0x4a, - 0x5c, 0x3c, 0x50, 0xa6, 0x77, 0x6a, 0xa5, 0x67, 0x8a, 0x04, 0x13, 0x58, 0x1a, 0x45, 0x4c, 0x28, - 0x90, 0x8b, 0x15, 0x64, 0x63, 0xb1, 0x04, 0xb3, 0x02, 0xb3, 0x06, 0xb7, 0x91, 0x35, 0x89, 0x6e, - 0xd6, 0x43, 0x72, 0x4a, 0x10, 0xc4, 0x24, 0xa7, 0xb8, 0x13, 0x8f, 0xe4, 0x18, 0x2f, 0x3c, 0x92, - 0x63, 0x7c, 0xf0, 0x48, 0x8e, 0x71, 0xc2, 0x63, 0x39, 0x86, 0x0b, 0x8f, 0xe5, 0x18, 0x6e, 0x3c, - 0x96, 0x63, 0x88, 0x72, 0x49, 0xcf, 0x2c, 0xc9, 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x87, - 0xd8, 0xa3, 0x0b, 0xb3, 0x48, 0x1f, 0xc9, 0x22, 0x5d, 0x84, 0x4d, 0xba, 0x10, 0xab, 0xf4, 0x2b, - 0xf4, 0x41, 0x61, 0x53, 0x52, 0x59, 0x90, 0x5a, 0x9c, 0xc4, 0x06, 0x0e, 0x16, 0x63, 0x40, 0x00, - 0x00, 0x00, 0xff, 0xff, 0xf9, 0x0d, 0x7d, 0xa5, 0x88, 0x01, 0x00, 0x00, +var fileDescriptor_05cdc636552868bf = []byte{ + // 270 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x0a, 0xaa, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0x17, 0x64, 0x67, 0xea, 0x17, 0xa5, 0x96, 0xe5, 0x67, 0xa7, 0xa6, 0xc4, 0xe7, + 0xe5, 0x27, 0xc7, 0x17, 0xe5, 0xe7, 0x97, 0xc4, 0x27, 0xa7, 0x16, 0x95, 0x64, 0xa6, 0x65, 0x26, + 0x27, 0x96, 0xa4, 0x16, 0xeb, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0xe9, 0xa1, 0x9a, 0xa9, 0x87, + 0xc7, 0x4c, 0xbd, 0x82, 0xec, 0x4c, 0x29, 0x07, 0x12, 0xdd, 0x80, 0x64, 0x25, 0xc4, 0x46, 0xa5, + 0xb5, 0x8c, 0x5c, 0x52, 0x41, 0x10, 0x97, 0xf9, 0xe5, 0x27, 0x07, 0xe5, 0xe7, 0x97, 0x38, 0x23, + 0x39, 0x4b, 0x48, 0x82, 0x8b, 0xbd, 0xb8, 0x34, 0x29, 0x2b, 0x35, 0xb9, 0x44, 0x82, 0x51, 0x81, + 0x51, 0x83, 
0x33, 0x08, 0xc6, 0x15, 0x52, 0xe2, 0xe2, 0x81, 0x32, 0xbd, 0x53, 0x2b, 0x3d, 0x53, + 0x24, 0x98, 0xc0, 0xd2, 0x28, 0x62, 0x42, 0x81, 0x5c, 0xac, 0x20, 0x1b, 0x8b, 0x25, 0x98, 0x15, + 0x98, 0x35, 0xb8, 0x8d, 0xac, 0x49, 0xf4, 0x9e, 0x1e, 0x92, 0x53, 0x82, 0x20, 0x26, 0x39, 0xc5, + 0x9d, 0x78, 0x24, 0xc7, 0x78, 0xe1, 0x91, 0x1c, 0xe3, 0x83, 0x47, 0x72, 0x8c, 0x13, 0x1e, 0xcb, + 0x31, 0x5c, 0x78, 0x2c, 0xc7, 0x70, 0xe3, 0xb1, 0x1c, 0x43, 0x94, 0x4b, 0x7a, 0x66, 0x49, 0x46, + 0x69, 0x92, 0x5e, 0x72, 0x7e, 0xae, 0x3e, 0xc4, 0x1e, 0x5d, 0x6c, 0xe1, 0xa2, 0x8b, 0xb0, 0x49, + 0x17, 0x1a, 0x32, 0x15, 0xe0, 0xb0, 0x29, 0xa9, 0x2c, 0x48, 0x2d, 0x4e, 0x62, 0x03, 0x07, 0x8b, + 0x31, 0x20, 0x00, 0x00, 0xff, 0xff, 0x47, 0x00, 0x55, 0xda, 0xde, 0x01, 0x00, 0x00, } func (m *RevokedNocRootCertificates) Marshal() (dAtA []byte, err error) { diff --git a/x/pki/types/revoked_root_certificates.pb.go b/x/pki/types/revoked_root_certificates.pb.go index 26422f375..3a562f638 100644 --- a/x/pki/types/revoked_root_certificates.pb.go +++ b/x/pki/types/revoked_root_certificates.pb.go @@ -1,11 +1,11 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: pki/revoked_root_certificates.proto +// source: zigbeealliance/distributedcomplianceledger/pki/revoked_root_certificates.proto package types import ( fmt "fmt" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -30,7 +30,7 @@ func (m *RevokedRootCertificates) Reset() { *m = RevokedRootCertificates func (m *RevokedRootCertificates) String() string { return proto.CompactTextString(m) } func (*RevokedRootCertificates) ProtoMessage() {} func (*RevokedRootCertificates) Descriptor() ([]byte, []int) { - return fileDescriptor_7c9e175bb3093f48, []int{0} + return fileDescriptor_e94861023281b0eb, []int{0} } func (m *RevokedRootCertificates) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -71,26 +71,26 @@ func init() { } func init() { - proto.RegisterFile("pki/revoked_root_certificates.proto", fileDescriptor_7c9e175bb3093f48) + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/pki/revoked_root_certificates.proto", fileDescriptor_e94861023281b0eb) } -var fileDescriptor_7c9e175bb3093f48 = []byte{ - // 233 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0xcf, 0x3d, 0x4e, 0xc4, 0x30, - 0x10, 0x86, 0xe1, 0x44, 0x08, 0x8a, 0xd0, 0x6d, 0x03, 0xa2, 0xb0, 0x56, 0xd0, 0xd0, 0xc4, 0x96, - 0xe0, 0x06, 0xfc, 0x14, 0xb4, 0x29, 0x41, 0x22, 0x4a, 0xec, 0x21, 0x8c, 0x92, 0xdd, 0xb1, 0xec, - 0xd9, 0x15, 0x70, 0x0a, 0x8e, 0x45, 0x99, 0x92, 0x12, 0x25, 0x17, 0x41, 0xc6, 0x02, 0xdc, 0x6e, - 0x69, 0x5b, 0x7e, 0x1f, 0x7d, 0xc5, 0x99, 0xed, 0x51, 0x39, 0xd8, 0x52, 0x0f, 0xa6, 0x76, 0x44, - 0x5c, 0x6b, 0x70, 0x8c, 0x4f, 0xa8, 0x1b, 0x06, 0x2f, 0xad, 0x23, 0xa6, 0x85, 0x7c, 0xc3, 0xae, - 0x05, 0x68, 0x86, 0x01, 0x9b, 0xb5, 0x06, 0x69, 0xd0, 0xb3, 0xc3, 0x76, 0xc3, 0x60, 0x34, 0xad, - 0x6c, 0xbc, 0x1d, 0xc0, 0x74, 0xe0, 0xa4, 0xed, 0xf1, 0x64, 0x19, 0xa2, 0x49, 0xa7, 0x46, 0x03, - 0xeb, 0x70, 0x08, 0xef, 0xa1, 0x78, 0xba, 0x2d, 0x8e, 
0xaa, 0x88, 0x56, 0x44, 0x7c, 0x9d, 0x90, - 0x8b, 0x87, 0x62, 0x3f, 0x7c, 0xf5, 0xc7, 0xf9, 0x72, 0xef, 0xfc, 0xf0, 0xe2, 0x76, 0x47, 0x5c, - 0x26, 0xb1, 0xbb, 0x3f, 0xb6, 0x8a, 0xcd, 0xab, 0xc7, 0x8f, 0x49, 0xe4, 0xe3, 0x24, 0xf2, 0xaf, - 0x49, 0xe4, 0xef, 0xb3, 0xc8, 0xc6, 0x59, 0x64, 0x9f, 0xb3, 0xc8, 0xee, 0x6f, 0x3a, 0xe4, 0xe7, - 0x4d, 0x2b, 0x35, 0xad, 0x54, 0x14, 0xcb, 0x5f, 0x52, 0x25, 0x64, 0xf9, 0x6f, 0x96, 0x11, 0x55, - 0x2f, 0x2a, 0xcc, 0xe5, 0x57, 0x0b, 0xbe, 0x3d, 0xf8, 0x99, 0x77, 0xf9, 0x1d, 0x00, 0x00, 0xff, - 0xff, 0x30, 0x6d, 0xb9, 0xc8, 0x57, 0x01, 0x00, 0x00, +var fileDescriptor_e94861023281b0eb = []byte{ + // 237 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xf2, 0xab, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0x17, 0x64, 0x67, 0xea, 0x17, 0xa5, 0x96, 0xe5, 0x67, 0xa7, 0xa6, 0xc4, 0x17, + 0xe5, 0xe7, 0x97, 0xc4, 0x27, 0xa7, 0x16, 0x95, 0x64, 0xa6, 0x65, 0x26, 0x27, 0x96, 0xa4, 0x16, + 0xeb, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0xe9, 0xa1, 0x9a, 0xa7, 0x87, 0xc7, 0x3c, 0xbd, 0x82, + 0xec, 0x4c, 0x29, 0x6f, 0x12, 0xed, 0x47, 0xb2, 0x32, 0x3e, 0x33, 0x25, 0x35, 0x0f, 0xc4, 0x01, + 0x19, 0x05, 0xb2, 0x5c, 0xa9, 0x8c, 0x4b, 0x3c, 0x08, 0xe2, 0xbe, 0xa0, 0xfc, 0xfc, 0x12, 0x67, + 0x24, 0xd7, 0x09, 0x45, 0x73, 0xb1, 0x82, 0xb4, 0x16, 0x4b, 0x30, 0x2a, 0x30, 0x6b, 0x70, 0x1b, + 0xb9, 0x92, 0xe8, 0x4e, 0x3d, 0x24, 0xc3, 0x3c, 0xe1, 0xd6, 0x06, 0x41, 0xcc, 0x74, 0x8a, 0x3b, + 0xf1, 0x48, 0x8e, 0xf1, 0xc2, 0x23, 0x39, 0xc6, 0x07, 0x8f, 0xe4, 0x18, 0x27, 0x3c, 0x96, 0x63, + 0xb8, 0xf0, 0x58, 0x8e, 0xe1, 0xc6, 0x63, 0x39, 0x86, 0x28, 0x97, 0xf4, 0xcc, 0x92, 0x8c, 0xd2, + 0x24, 0xbd, 0xe4, 0xfc, 0x5c, 0x7d, 0x88, 0x8d, 0xba, 0xd8, 0xbc, 0xaa, 0x8b, 0xb0, 0x53, 0x17, + 0xea, 0xd9, 0x0a, 0xb0, 0x77, 0x4b, 
0x2a, 0x0b, 0x52, 0x8b, 0x93, 0xd8, 0xc0, 0xde, 0x33, 0x06, + 0x04, 0x00, 0x00, 0xff, 0xff, 0x13, 0xab, 0x09, 0x2e, 0xad, 0x01, 0x00, 0x00, } func (m *RevokedRootCertificates) Marshal() (dAtA []byte, err error) { diff --git a/x/pki/types/tx.pb.go b/x/pki/types/tx.pb.go index 564a59df2..37dccfaa6 100644 --- a/x/pki/types/tx.pb.go +++ b/x/pki/types/tx.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: pki/tx.proto +// source: zigbeealliance/distributedcomplianceledger/pki/tx.proto package types @@ -7,9 +7,9 @@ import ( context "context" fmt "fmt" _ "github.com/cosmos/cosmos-proto" - _ "github.com/gogo/protobuf/gogoproto" - grpc1 "github.com/gogo/protobuf/grpc" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + grpc1 "github.com/cosmos/gogoproto/grpc" + proto "github.com/cosmos/gogoproto/proto" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" status "google.golang.org/grpc/status" @@ -43,7 +43,7 @@ func (m *MsgProposeAddX509RootCert) Reset() { *m = MsgProposeAddX509Root func (m *MsgProposeAddX509RootCert) String() string { return proto.CompactTextString(m) } func (*MsgProposeAddX509RootCert) ProtoMessage() {} func (*MsgProposeAddX509RootCert) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{0} + return fileDescriptor_3047b9df3153e42a, []int{0} } func (m *MsgProposeAddX509RootCert) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -128,7 +128,7 @@ func (m *MsgProposeAddX509RootCertResponse) Reset() { *m = MsgProposeAdd func (m *MsgProposeAddX509RootCertResponse) String() string { return proto.CompactTextString(m) } func (*MsgProposeAddX509RootCertResponse) ProtoMessage() {} func (*MsgProposeAddX509RootCertResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{1} + return fileDescriptor_3047b9df3153e42a, []int{1} } func (m *MsgProposeAddX509RootCertResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ 
-169,7 +169,7 @@ func (m *MsgApproveAddX509RootCert) Reset() { *m = MsgApproveAddX509Root func (m *MsgApproveAddX509RootCert) String() string { return proto.CompactTextString(m) } func (*MsgApproveAddX509RootCert) ProtoMessage() {} func (*MsgApproveAddX509RootCert) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{2} + return fileDescriptor_3047b9df3153e42a, []int{2} } func (m *MsgApproveAddX509RootCert) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -240,7 +240,7 @@ func (m *MsgApproveAddX509RootCertResponse) Reset() { *m = MsgApproveAdd func (m *MsgApproveAddX509RootCertResponse) String() string { return proto.CompactTextString(m) } func (*MsgApproveAddX509RootCertResponse) ProtoMessage() {} func (*MsgApproveAddX509RootCertResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{3} + return fileDescriptor_3047b9df3153e42a, []int{3} } func (m *MsgApproveAddX509RootCertResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -282,7 +282,7 @@ func (m *MsgAddX509Cert) Reset() { *m = MsgAddX509Cert{} } func (m *MsgAddX509Cert) String() string { return proto.CompactTextString(m) } func (*MsgAddX509Cert) ProtoMessage() {} func (*MsgAddX509Cert) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{4} + return fileDescriptor_3047b9df3153e42a, []int{4} } func (m *MsgAddX509Cert) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -360,7 +360,7 @@ func (m *MsgAddX509CertResponse) Reset() { *m = MsgAddX509CertResponse{} func (m *MsgAddX509CertResponse) String() string { return proto.CompactTextString(m) } func (*MsgAddX509CertResponse) ProtoMessage() {} func (*MsgAddX509CertResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{5} + return fileDescriptor_3047b9df3153e42a, []int{5} } func (m *MsgAddX509CertResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -404,7 +404,7 @@ func (m *MsgProposeRevokeX509RootCert) 
Reset() { *m = MsgProposeRevokeX5 func (m *MsgProposeRevokeX509RootCert) String() string { return proto.CompactTextString(m) } func (*MsgProposeRevokeX509RootCert) ProtoMessage() {} func (*MsgProposeRevokeX509RootCert) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{6} + return fileDescriptor_3047b9df3153e42a, []int{6} } func (m *MsgProposeRevokeX509RootCert) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -496,7 +496,7 @@ func (m *MsgProposeRevokeX509RootCertResponse) Reset() { *m = MsgPropose func (m *MsgProposeRevokeX509RootCertResponse) String() string { return proto.CompactTextString(m) } func (*MsgProposeRevokeX509RootCertResponse) ProtoMessage() {} func (*MsgProposeRevokeX509RootCertResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{7} + return fileDescriptor_3047b9df3153e42a, []int{7} } func (m *MsgProposeRevokeX509RootCertResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -538,7 +538,7 @@ func (m *MsgApproveRevokeX509RootCert) Reset() { *m = MsgApproveRevokeX5 func (m *MsgApproveRevokeX509RootCert) String() string { return proto.CompactTextString(m) } func (*MsgApproveRevokeX509RootCert) ProtoMessage() {} func (*MsgApproveRevokeX509RootCert) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{8} + return fileDescriptor_3047b9df3153e42a, []int{8} } func (m *MsgApproveRevokeX509RootCert) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -616,7 +616,7 @@ func (m *MsgApproveRevokeX509RootCertResponse) Reset() { *m = MsgApprove func (m *MsgApproveRevokeX509RootCertResponse) String() string { return proto.CompactTextString(m) } func (*MsgApproveRevokeX509RootCertResponse) ProtoMessage() {} func (*MsgApproveRevokeX509RootCertResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{9} + return fileDescriptor_3047b9df3153e42a, []int{9} } func (m *MsgApproveRevokeX509RootCertResponse) XXX_Unmarshal(b []byte) 
error { return m.Unmarshal(b) @@ -660,7 +660,7 @@ func (m *MsgRevokeX509Cert) Reset() { *m = MsgRevokeX509Cert{} } func (m *MsgRevokeX509Cert) String() string { return proto.CompactTextString(m) } func (*MsgRevokeX509Cert) ProtoMessage() {} func (*MsgRevokeX509Cert) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{10} + return fileDescriptor_3047b9df3153e42a, []int{10} } func (m *MsgRevokeX509Cert) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -752,7 +752,7 @@ func (m *MsgRevokeX509CertResponse) Reset() { *m = MsgRevokeX509CertResp func (m *MsgRevokeX509CertResponse) String() string { return proto.CompactTextString(m) } func (*MsgRevokeX509CertResponse) ProtoMessage() {} func (*MsgRevokeX509CertResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{11} + return fileDescriptor_3047b9df3153e42a, []int{11} } func (m *MsgRevokeX509CertResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -794,7 +794,7 @@ func (m *MsgRejectAddX509RootCert) Reset() { *m = MsgRejectAddX509RootCe func (m *MsgRejectAddX509RootCert) String() string { return proto.CompactTextString(m) } func (*MsgRejectAddX509RootCert) ProtoMessage() {} func (*MsgRejectAddX509RootCert) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{12} + return fileDescriptor_3047b9df3153e42a, []int{12} } func (m *MsgRejectAddX509RootCert) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -872,7 +872,7 @@ func (m *MsgRejectAddX509RootCertResponse) Reset() { *m = MsgRejectAddX5 func (m *MsgRejectAddX509RootCertResponse) String() string { return proto.CompactTextString(m) } func (*MsgRejectAddX509RootCertResponse) ProtoMessage() {} func (*MsgRejectAddX509RootCertResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{13} + return fileDescriptor_3047b9df3153e42a, []int{13} } func (m *MsgRejectAddX509RootCertResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) 
@@ -922,7 +922,7 @@ func (m *MsgAddPkiRevocationDistributionPoint) Reset() { *m = MsgAddPkiR func (m *MsgAddPkiRevocationDistributionPoint) String() string { return proto.CompactTextString(m) } func (*MsgAddPkiRevocationDistributionPoint) ProtoMessage() {} func (*MsgAddPkiRevocationDistributionPoint) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{14} + return fileDescriptor_3047b9df3153e42a, []int{14} } func (m *MsgAddPkiRevocationDistributionPoint) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1060,7 +1060,7 @@ func (m *MsgAddPkiRevocationDistributionPointResponse) String() string { } func (*MsgAddPkiRevocationDistributionPointResponse) ProtoMessage() {} func (*MsgAddPkiRevocationDistributionPointResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{15} + return fileDescriptor_3047b9df3153e42a, []int{15} } func (m *MsgAddPkiRevocationDistributionPointResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1109,7 +1109,7 @@ func (m *MsgUpdatePkiRevocationDistributionPoint) Reset() { func (m *MsgUpdatePkiRevocationDistributionPoint) String() string { return proto.CompactTextString(m) } func (*MsgUpdatePkiRevocationDistributionPoint) ProtoMessage() {} func (*MsgUpdatePkiRevocationDistributionPoint) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{16} + return fileDescriptor_3047b9df3153e42a, []int{16} } func (m *MsgUpdatePkiRevocationDistributionPoint) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1226,7 +1226,7 @@ func (m *MsgUpdatePkiRevocationDistributionPointResponse) String() string { } func (*MsgUpdatePkiRevocationDistributionPointResponse) ProtoMessage() {} func (*MsgUpdatePkiRevocationDistributionPointResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{17} + return fileDescriptor_3047b9df3153e42a, []int{17} } func (m *MsgUpdatePkiRevocationDistributionPointResponse) XXX_Unmarshal(b []byte) 
error { return m.Unmarshal(b) @@ -1268,7 +1268,7 @@ func (m *MsgDeletePkiRevocationDistributionPoint) Reset() { func (m *MsgDeletePkiRevocationDistributionPoint) String() string { return proto.CompactTextString(m) } func (*MsgDeletePkiRevocationDistributionPoint) ProtoMessage() {} func (*MsgDeletePkiRevocationDistributionPoint) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{18} + return fileDescriptor_3047b9df3153e42a, []int{18} } func (m *MsgDeletePkiRevocationDistributionPoint) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1336,7 +1336,7 @@ func (m *MsgDeletePkiRevocationDistributionPointResponse) String() string { } func (*MsgDeletePkiRevocationDistributionPointResponse) ProtoMessage() {} func (*MsgDeletePkiRevocationDistributionPointResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{19} + return fileDescriptor_3047b9df3153e42a, []int{19} } func (m *MsgDeletePkiRevocationDistributionPointResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1376,7 +1376,7 @@ func (m *MsgAssignVid) Reset() { *m = MsgAssignVid{} } func (m *MsgAssignVid) String() string { return proto.CompactTextString(m) } func (*MsgAssignVid) ProtoMessage() {} func (*MsgAssignVid) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{20} + return fileDescriptor_3047b9df3153e42a, []int{20} } func (m *MsgAssignVid) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1440,7 +1440,7 @@ func (m *MsgAssignVidResponse) Reset() { *m = MsgAssignVidResponse{} } func (m *MsgAssignVidResponse) String() string { return proto.CompactTextString(m) } func (*MsgAssignVidResponse) ProtoMessage() {} func (*MsgAssignVidResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{21} + return fileDescriptor_3047b9df3153e42a, []int{21} } func (m *MsgAssignVidResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1480,7 +1480,7 @@ func (m 
*MsgAddNocX509RootCert) Reset() { *m = MsgAddNocX509RootCert{} } func (m *MsgAddNocX509RootCert) String() string { return proto.CompactTextString(m) } func (*MsgAddNocX509RootCert) ProtoMessage() {} func (*MsgAddNocX509RootCert) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{22} + return fileDescriptor_3047b9df3153e42a, []int{22} } func (m *MsgAddNocX509RootCert) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1544,7 +1544,7 @@ func (m *MsgAddNocX509RootCertResponse) Reset() { *m = MsgAddNocX509Root func (m *MsgAddNocX509RootCertResponse) String() string { return proto.CompactTextString(m) } func (*MsgAddNocX509RootCertResponse) ProtoMessage() {} func (*MsgAddNocX509RootCertResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{23} + return fileDescriptor_3047b9df3153e42a, []int{23} } func (m *MsgAddNocX509RootCertResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1584,7 +1584,7 @@ func (m *MsgRemoveX509Cert) Reset() { *m = MsgRemoveX509Cert{} } func (m *MsgRemoveX509Cert) String() string { return proto.CompactTextString(m) } func (*MsgRemoveX509Cert) ProtoMessage() {} func (*MsgRemoveX509Cert) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{24} + return fileDescriptor_3047b9df3153e42a, []int{24} } func (m *MsgRemoveX509Cert) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1648,7 +1648,7 @@ func (m *MsgRemoveX509CertResponse) Reset() { *m = MsgRemoveX509CertResp func (m *MsgRemoveX509CertResponse) String() string { return proto.CompactTextString(m) } func (*MsgRemoveX509CertResponse) ProtoMessage() {} func (*MsgRemoveX509CertResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{25} + return fileDescriptor_3047b9df3153e42a, []int{25} } func (m *MsgRemoveX509CertResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1688,7 +1688,7 @@ func (m *MsgAddNocX509IcaCert) Reset() { *m = 
MsgAddNocX509IcaCert{} } func (m *MsgAddNocX509IcaCert) String() string { return proto.CompactTextString(m) } func (*MsgAddNocX509IcaCert) ProtoMessage() {} func (*MsgAddNocX509IcaCert) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{26} + return fileDescriptor_3047b9df3153e42a, []int{26} } func (m *MsgAddNocX509IcaCert) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1752,7 +1752,7 @@ func (m *MsgAddNocX509IcaCertResponse) Reset() { *m = MsgAddNocX509IcaCe func (m *MsgAddNocX509IcaCertResponse) String() string { return proto.CompactTextString(m) } func (*MsgAddNocX509IcaCertResponse) ProtoMessage() {} func (*MsgAddNocX509IcaCertResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{27} + return fileDescriptor_3047b9df3153e42a, []int{27} } func (m *MsgAddNocX509IcaCertResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1796,7 +1796,7 @@ func (m *MsgRevokeNocX509RootCert) Reset() { *m = MsgRevokeNocX509RootCe func (m *MsgRevokeNocX509RootCert) String() string { return proto.CompactTextString(m) } func (*MsgRevokeNocX509RootCert) ProtoMessage() {} func (*MsgRevokeNocX509RootCert) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{28} + return fileDescriptor_3047b9df3153e42a, []int{28} } func (m *MsgRevokeNocX509RootCert) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1888,7 +1888,7 @@ func (m *MsgRevokeNocX509RootCertResponse) Reset() { *m = MsgRevokeNocX5 func (m *MsgRevokeNocX509RootCertResponse) String() string { return proto.CompactTextString(m) } func (*MsgRevokeNocX509RootCertResponse) ProtoMessage() {} func (*MsgRevokeNocX509RootCertResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{29} + return fileDescriptor_3047b9df3153e42a, []int{29} } func (m *MsgRevokeNocX509RootCertResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1932,7 +1932,7 @@ func (m *MsgRevokeNocX509IcaCert) 
Reset() { *m = MsgRevokeNocX509IcaCert func (m *MsgRevokeNocX509IcaCert) String() string { return proto.CompactTextString(m) } func (*MsgRevokeNocX509IcaCert) ProtoMessage() {} func (*MsgRevokeNocX509IcaCert) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{30} + return fileDescriptor_3047b9df3153e42a, []int{30} } func (m *MsgRevokeNocX509IcaCert) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2024,7 +2024,7 @@ func (m *MsgRevokeNocX509IcaCertResponse) Reset() { *m = MsgRevokeNocX50 func (m *MsgRevokeNocX509IcaCertResponse) String() string { return proto.CompactTextString(m) } func (*MsgRevokeNocX509IcaCertResponse) ProtoMessage() {} func (*MsgRevokeNocX509IcaCertResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_badfdb2b39855d16, []int{31} + return fileDescriptor_3047b9df3153e42a, []int{31} } func (m *MsgRevokeNocX509IcaCertResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -2088,105 +2088,107 @@ func init() { proto.RegisterType((*MsgRevokeNocX509IcaCertResponse)(nil), "zigbeealliance.distributedcomplianceledger.pki.MsgRevokeNocX509IcaCertResponse") } -func init() { proto.RegisterFile("pki/tx.proto", fileDescriptor_badfdb2b39855d16) } - -var fileDescriptor_badfdb2b39855d16 = []byte{ - // 1515 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x5a, 0xcd, 0x6f, 0xd4, 0xc6, - 0x1b, 0xc6, 0xfb, 0x91, 0x8f, 0x37, 0x21, 0x12, 0xf3, 0x0b, 0xc1, 0x18, 0x7e, 0xbb, 0x8b, 0x41, - 0x10, 0xa9, 0xc9, 0x6e, 0x08, 0x6c, 0x0a, 0xa8, 0xb4, 0x4a, 0xd8, 0x02, 0x11, 0x09, 0x4d, 0x1d, - 0xa0, 0x55, 0x55, 0x15, 0x79, 0xd7, 0x83, 0x99, 0xc6, 0xbb, 0x76, 0x6d, 0x6f, 0x44, 0xb8, 0xf5, - 0x2f, 0x28, 0x52, 0xd5, 0x6b, 0x6f, 0x6d, 0x25, 0xa4, 0x22, 0xaa, 0x7e, 0x5c, 0x7b, 0xed, 0xa5, - 0x12, 0xea, 0xa5, 0x3d, 0xad, 0x10, 0x5c, 0x2a, 0xf5, 0xb6, 0xf7, 0x4a, 0x95, 0x3f, 0x76, 0xbc, - 0xde, 0xb5, 0x93, 0x5d, 0xc7, 0xa9, 0x42, 0x9a, 0x9b, 0x3d, 0x3b, 0xef, 0x33, 0x33, 
0xef, 0xf3, - 0xbc, 0xe3, 0x79, 0xdf, 0x59, 0x18, 0xd5, 0xd6, 0x48, 0xc1, 0x7c, 0x90, 0xd7, 0x74, 0xd5, 0x54, - 0x51, 0xfe, 0x21, 0x91, 0xcb, 0x18, 0x8b, 0x8a, 0x42, 0xc4, 0x5a, 0x05, 0xe7, 0x25, 0x62, 0x98, - 0x3a, 0x29, 0xd7, 0x4d, 0x2c, 0x55, 0xd4, 0xaa, 0xe6, 0xb4, 0x2a, 0x58, 0x92, 0xb1, 0x9e, 0xd7, - 0xd6, 0x08, 0x77, 0xb4, 0xa2, 0x1a, 0x55, 0xd5, 0xb8, 0x6b, 0x5b, 0x17, 0x9c, 0x17, 0x07, 0x8a, - 0x1b, 0x97, 0x55, 0x59, 0x75, 0xda, 0xad, 0x27, 0xa7, 0x95, 0x7f, 0x92, 0x84, 0xa3, 0xcb, 0x86, - 0xbc, 0xa2, 0xab, 0x9a, 0x6a, 0xe0, 0x79, 0x49, 0x7a, 0xbf, 0x38, 0x73, 0x51, 0x50, 0x55, 0xf3, - 0x0a, 0xd6, 0x4d, 0x74, 0x0d, 0x06, 0x0c, 0x22, 0xd7, 0xb0, 0xce, 0x32, 0x39, 0x66, 0x72, 0x78, - 0xa1, 0xd0, 0x6c, 0x64, 0xff, 0xb7, 0x2e, 0x2a, 0x44, 0x12, 0x4d, 0x7c, 0x89, 0xd7, 0xf1, 0x27, - 0x75, 0xa2, 0x63, 0x89, 0xff, 0xed, 0x87, 0xe9, 0x71, 0x77, 0xb0, 0x79, 0x49, 0xd2, 0xb1, 0x61, - 0xac, 0x9a, 0x3a, 0xa9, 0xc9, 0x82, 0x6b, 0x8e, 0x2e, 0x40, 0xaa, 0x82, 0x75, 0x93, 0x4d, 0xd8, - 0x30, 0xa7, 0x9a, 0x8d, 0x6c, 0xae, 0x1b, 0x66, 0xaa, 0x2a, 0x3e, 0xb8, 0x7c, 0x76, 0xe6, 0xfc, - 0x85, 0xe2, 0xeb, 0x73, 0x33, 0xbc, 0x60, 0x5b, 0xa0, 0xd7, 0x20, 0x45, 0x6a, 0xf7, 0x54, 0x36, - 0x69, 0x5b, 0x1e, 0xf1, 0x4f, 0xc0, 0x32, 0x38, 0x3f, 0x73, 0x71, 0x8e, 0x17, 0xec, 0x4e, 0x08, - 0x41, 0xca, 0x24, 0x55, 0xcc, 0xa6, 0x72, 0xcc, 0x64, 0x52, 0xb0, 0x9f, 0xd1, 0x45, 0x48, 0xae, - 0x13, 0x89, 0x4d, 0xe7, 0x98, 0xc9, 0xf4, 0xc2, 0x99, 0x66, 0x23, 0x7b, 0xd2, 0xb3, 0x97, 0x4d, - 0x7c, 0xf9, 0xec, 0x94, 0x62, 0xe2, 0xcb, 0x73, 0xc5, 0xe2, 0xb9, 0xe2, 0x14, 0x5d, 0x90, 0x60, - 0xd9, 0xa0, 0x25, 0x38, 0x64, 0xcd, 0x61, 0xb5, 0x72, 0x1f, 0x57, 0xc5, 0x3b, 0x58, 0x37, 0x88, - 0x5a, 0x63, 0x07, 0x72, 0xcc, 0xe4, 0xc1, 0x85, 0x4c, 0xb3, 0x91, 0xe5, 0xfc, 0x40, 0x33, 0x1e, - 0x10, 0x2f, 0x74, 0x1b, 0xa2, 0x12, 0x1c, 0x34, 0x7c, 0x48, 0x83, 0x3d, 0x21, 0xf9, 0x8d, 0xf8, - 0x93, 0x70, 0x22, 0x94, 0x2f, 0x01, 0x1b, 0x9a, 0x5a, 0x33, 0x30, 0xff, 0x75, 0xc2, 0x66, 0x75, - 0x5e, 0xd3, 0x74, 0x75, 
0x7d, 0xe7, 0x58, 0xbd, 0x04, 0x83, 0x46, 0xbd, 0xfc, 0x31, 0xae, 0xb4, - 0x88, 0xcd, 0x35, 0x1b, 0xd9, 0xe3, 0xa1, 0xc4, 0xce, 0x9e, 0xe7, 0x85, 0x96, 0x01, 0xba, 0x02, - 0xa3, 0xee, 0xe3, 0x0d, 0xbc, 0xb1, 0x28, 0xb9, 0xfc, 0x66, 0x9b, 0x8d, 0xec, 0xb1, 0x10, 0x80, - 0xd9, 0xe2, 0x1c, 0x2f, 0xf8, 0x8c, 0xa8, 0x38, 0x52, 0xfd, 0x88, 0x23, 0xed, 0x89, 0xc3, 0xf5, - 0x66, 0xb0, 0x9f, 0xa8, 0x37, 0xff, 0x4c, 0xc0, 0x98, 0xd5, 0xcb, 0xf9, 0x79, 0xcf, 0x04, 0x46, - 0xa0, 0xba, 0xd3, 0xb1, 0xa9, 0x7b, 0x20, 0x8a, 0xba, 0x59, 0x98, 0xf0, 0x7b, 0x9a, 0x92, 0xf0, - 0x63, 0x12, 0x8e, 0x7b, 0xc2, 0x17, 0xf0, 0xba, 0xba, 0x86, 0xf7, 0x55, 0x1d, 0xa8, 0x6a, 0xc4, - 0xc3, 0xa8, 0x81, 0x75, 0x22, 0x2a, 0x37, 0xeb, 0xd5, 0x32, 0xd6, 0x6d, 0x2a, 0x86, 0x05, 0x5f, - 0x1b, 0xca, 0xc1, 0x88, 0x6e, 0x3b, 0xf1, 0xca, 0x7d, 0xa2, 0x48, 0xf6, 0x5e, 0x34, 0x24, 0xb4, - 0x37, 0x75, 0x33, 0x3a, 0x14, 0x85, 0xd1, 0xd3, 0x70, 0x6a, 0x33, 0xda, 0x28, 0xbf, 0xbf, 0x26, - 0x6c, 0x7e, 0xdd, 0x50, 0xfc, 0x4f, 0xf0, 0x9b, 0xee, 0x87, 0xdf, 0x81, 0x4d, 0xf8, 0x1d, 0xec, - 0xe6, 0xd7, 0xf5, 0x7b, 0xa8, 0x3b, 0xa9, 0xdf, 0x1f, 0x27, 0xe1, 0xd0, 0xb2, 0x21, 0x7b, 0x3d, - 0xf6, 0x83, 0x69, 0xf7, 0x06, 0xd3, 0x31, 0xfb, 0xb3, 0xee, 0xe7, 0x8a, 0x32, 0xf9, 0x57, 0x02, - 0x58, 0xfb, 0x57, 0x6b, 0xf1, 0xfb, 0xdf, 0xfc, 0x50, 0x42, 0xe3, 0xf9, 0x52, 0xf1, 0x90, 0x0b, - 0x73, 0x36, 0x65, 0xe4, 0xc9, 0x80, 0x13, 0x84, 0x92, 0xb4, 0xb2, 0x46, 0x2c, 0xd6, 0x2a, 0xa2, - 0x49, 0xd4, 0x5a, 0xa9, 0x75, 0x88, 0x27, 0x6a, 0x6d, 0x45, 0x25, 0xb5, 0x18, 0xd9, 0x71, 0x0f, - 0xbb, 0x89, 0x08, 0x87, 0xdd, 0x19, 0x48, 0x6a, 0xc4, 0xe1, 0x24, 0xbd, 0xa5, 0x33, 0xac, 0xae, - 0x68, 0x1c, 0xd2, 0xc4, 0x58, 0x99, 0x9f, 0xb7, 0xa9, 0x18, 0x12, 0x9c, 0x17, 0x34, 0x0d, 0x69, - 0x45, 0x2c, 0x63, 0x25, 0x78, 0x7b, 0xf3, 0x06, 0x75, 0x7a, 0xa1, 0x1b, 0x30, 0x5e, 0xd1, 0x95, - 0x55, 0x7b, 0xfa, 0x96, 0xf3, 0xc8, 0x3d, 0x52, 0x11, 0x4d, 0x67, 0xbf, 0xdb, 0xc4, 0x3a, 0xd0, - 0x08, 0x5d, 0x03, 0x44, 0x0c, 0xa3, 0x8e, 0xf5, 0x55, 0x4f, 
0x31, 0x25, 0x67, 0x7b, 0x0c, 0x87, - 0x0a, 0x30, 0x41, 0x45, 0x18, 0x94, 0x44, 0x53, 0xbc, 0x2d, 0x2c, 0xd9, 0x81, 0x3a, 0xbc, 0x70, - 0xac, 0xd9, 0xc8, 0x1e, 0x09, 0x10, 0x69, 0x5d, 0x57, 0x78, 0xa1, 0xd5, 0xd7, 0xda, 0x2b, 0xac, - 0xc7, 0xab, 0x44, 0xc1, 0xab, 0xe4, 0x21, 0x66, 0x87, 0x73, 0xcc, 0x64, 0x4a, 0xf0, 0xb5, 0xa1, - 0x0c, 0x80, 0xf5, 0x5e, 0x22, 0x32, 0x36, 0x4c, 0x16, 0xec, 0xdd, 0xa4, 0xad, 0x05, 0x9d, 0x86, - 0x31, 0xef, 0xed, 0xd6, 0x86, 0x86, 0xd9, 0x11, 0x4b, 0x9f, 0x42, 0x47, 0x2b, 0x7a, 0x0b, 0xc6, - 0x74, 0x2a, 0x29, 0xbb, 0xdf, 0xa8, 0xad, 0xe3, 0xd0, 0x75, 0x76, 0x74, 0xef, 0x8e, 0x83, 0x83, - 0x11, 0xe2, 0x00, 0xdd, 0x00, 0x44, 0xa9, 0x28, 0x61, 0x05, 0xcb, 0xa2, 0xa9, 0xea, 0xec, 0x58, - 0x90, 0xd3, 0xfc, 0xa7, 0xd8, 0x00, 0x33, 0x3e, 0x0f, 0x53, 0xbd, 0xc4, 0x0b, 0x0d, 0xb0, 0x47, - 0x69, 0x38, 0xb3, 0x6c, 0xc8, 0xb7, 0x35, 0x6b, 0x80, 0x57, 0x20, 0xc6, 0x68, 0x6c, 0x24, 0x7b, - 0x8a, 0x8d, 0xd9, 0x90, 0xd8, 0xb0, 0xb7, 0xbe, 0xbe, 0x42, 0x20, 0xdd, 0x7f, 0x08, 0xcc, 0x79, - 0x21, 0xe0, 0xc4, 0xe2, 0xf1, 0x66, 0x23, 0xcb, 0x7a, 0xd6, 0x6a, 0x95, 0x98, 0xb8, 0xaa, 0x99, - 0x1b, 0x5b, 0xc4, 0xc0, 0xe0, 0x96, 0x31, 0x30, 0xd4, 0x43, 0x0c, 0x0c, 0x07, 0xc6, 0x40, 0x97, - 0x84, 0x21, 0x3e, 0x09, 0x8f, 0x44, 0x93, 0xf0, 0x59, 0x28, 0xf4, 0xa8, 0x48, 0xaa, 0xe2, 0xaf, - 0x12, 0xb6, 0x8a, 0x2d, 0x8c, 0xbd, 0xa8, 0xe2, 0x60, 0x45, 0xa6, 0xfa, 0x56, 0xa4, 0xeb, 0xda, - 0x5e, 0xdc, 0x44, 0x5d, 0xfb, 0x79, 0x02, 0x46, 0xad, 0x1d, 0xc5, 0xb0, 0x56, 0x7d, 0x87, 0x48, - 0x7b, 0xe8, 0x1c, 0xe4, 0x12, 0x98, 0xea, 0x9f, 0x40, 0x7e, 0x02, 0xc6, 0xdb, 0x9d, 0xe2, 0x9d, - 0x57, 0x12, 0x70, 0xd8, 0xd9, 0x7f, 0x6f, 0xaa, 0x95, 0xdd, 0x56, 0x08, 0x0c, 0x2c, 0x57, 0xa4, - 0x62, 0x2b, 0x57, 0xa4, 0xa3, 0x1c, 0x02, 0xb3, 0xf0, 0xff, 0x40, 0x7f, 0x51, 0x8f, 0x7e, 0x9a, - 0x70, 0xb3, 0xab, 0xaa, 0xba, 0xbe, 0x17, 0xb3, 0xab, 0xce, 0xe4, 0x28, 0x15, 0x90, 0x89, 0xb6, - 0x92, 0x96, 0x76, 0x17, 0x50, 0x07, 0x7d, 0x9b, 0x70, 0xb4, 0x48, 0x5d, 0xb8, 0x58, 0x11, 0x77, - 
0xb5, 0xe2, 0x92, 0xb1, 0x29, 0x2e, 0x15, 0x45, 0x71, 0x19, 0xa7, 0x4a, 0xd2, 0xe9, 0x2e, 0xea, - 0xcf, 0xef, 0x92, 0x6e, 0x12, 0x68, 0xa5, 0x88, 0x3b, 0x16, 0xc5, 0xaf, 0x82, 0xee, 0xb6, 0x5f, - 0x66, 0xf9, 0xb7, 0xb2, 0xfa, 0x56, 0x2a, 0x19, 0x40, 0x19, 0xe5, 0xf5, 0x69, 0x12, 0x8e, 0x74, - 0x76, 0x8a, 0x3d, 0x54, 0xf6, 0x69, 0x8d, 0x93, 0xd6, 0x13, 0x90, 0x0d, 0x61, 0xac, 0xc5, 0xea, - 0xec, 0xdf, 0x13, 0x90, 0x5c, 0x36, 0x64, 0xf4, 0x13, 0x03, 0x13, 0x21, 0x57, 0x70, 0x8b, 0x7d, - 0x5e, 0x01, 0xe6, 0x43, 0x6f, 0x87, 0xb8, 0x77, 0x63, 0x83, 0x6a, 0x2d, 0xc0, 0x9e, 0x78, 0xc8, - 0x2d, 0x53, 0x94, 0x89, 0x07, 0x43, 0x45, 0x9a, 0xf8, 0xe6, 0x77, 0x3a, 0xe8, 0x0b, 0x06, 0x46, - 0xda, 0x2f, 0x74, 0xde, 0x8c, 0x32, 0x84, 0x67, 0xcf, 0x5d, 0xdd, 0x9e, 0x3d, 0x9d, 0xd7, 0xcf, - 0x0c, 0x1c, 0x0d, 0xbf, 0xe3, 0x58, 0x8a, 0xce, 0x60, 0x37, 0x1a, 0x77, 0x2b, 0x4e, 0x34, 0xdf, - 0x0a, 0xc2, 0xab, 0xf8, 0x4b, 0xd1, 0xa9, 0x8c, 0x69, 0x05, 0x5b, 0x96, 0xc4, 0xd1, 0x97, 0x0c, - 0x8c, 0x75, 0xd4, 0xc3, 0xe7, 0x23, 0x0c, 0xe4, 0x87, 0xe0, 0x16, 0xb7, 0x0d, 0x41, 0x27, 0xf8, - 0x3d, 0x03, 0x87, 0x83, 0xcb, 0xbc, 0xd7, 0x23, 0x0d, 0x12, 0x80, 0xc4, 0xad, 0xc4, 0x85, 0x44, - 0x67, 0xfd, 0x3b, 0x03, 0x27, 0xb6, 0x2e, 0x85, 0xde, 0x8a, 0x16, 0x48, 0x9b, 0xa3, 0x72, 0x1f, - 0xee, 0x04, 0x2a, 0x5d, 0xd9, 0x73, 0x06, 0x4e, 0xf5, 0x54, 0x83, 0x7a, 0x2f, 0xc2, 0x34, 0x7a, - 0x01, 0xe6, 0xee, 0xee, 0x10, 0xb0, 0x6f, 0x89, 0x3d, 0x15, 0x28, 0xa2, 0x2c, 0xb1, 0x17, 0xe0, - 0x48, 0x4b, 0xec, 0xa7, 0x56, 0x80, 0x3e, 0x63, 0x60, 0xd8, 0x2b, 0x14, 0xbc, 0x11, 0x45, 0x31, - 0x2d, 0x6b, 0xae, 0xb4, 0x1d, 0x6b, 0x3a, 0xa3, 0xc7, 0x0c, 0xa0, 0x80, 0x64, 0xfc, 0xed, 0x68, - 0x62, 0xee, 0x80, 0xe1, 0x96, 0x63, 0x81, 0xe9, 0xd8, 0x35, 0x7d, 0x79, 0x6e, 0xb4, 0x5d, 0xb3, - 0x1d, 0x22, 0xe2, 0xae, 0x19, 0x94, 0x6a, 0xa2, 0x6f, 0x18, 0x38, 0xd4, 0x9d, 0x67, 0x96, 0xb6, - 0xe5, 0x05, 0x17, 0x85, 0x5b, 0x8a, 0x03, 0xa5, 0x63, 0x7f, 0x0f, 0xca, 0xe0, 0xae, 0x47, 0xfe, - 0x88, 0x74, 0xb2, 0xbf, 0x12, 0x17, 
0x12, 0x9d, 0xf5, 0x53, 0x06, 0xc6, 0x83, 0xf3, 0x93, 0xed, - 0x0e, 0xd5, 0xf2, 0xf2, 0x3b, 0x31, 0x01, 0xb5, 0xa6, 0xbc, 0xf0, 0xd1, 0x2f, 0x2f, 0x32, 0xcc, - 0xb3, 0x17, 0x19, 0xe6, 0xf9, 0x8b, 0x0c, 0xf3, 0xe8, 0x65, 0xe6, 0xc0, 0xb3, 0x97, 0x99, 0x03, - 0x7f, 0xbc, 0xcc, 0x1c, 0xf8, 0xa0, 0x24, 0x13, 0xf3, 0x7e, 0xbd, 0x9c, 0xaf, 0xa8, 0xd5, 0x82, - 0x33, 0xe8, 0x74, 0x6b, 0xd4, 0x42, 0xdb, 0xa8, 0xd3, 0xde, 0xb0, 0xd3, 0xce, 0xb8, 0x85, 0x07, - 0x05, 0xfb, 0x1f, 0x7c, 0x1b, 0x1a, 0x36, 0xca, 0x03, 0xf6, 0x9f, 0xec, 0xce, 0xfd, 0x13, 0x00, - 0x00, 0xff, 0xff, 0x75, 0xd3, 0xa1, 0x57, 0xd5, 0x27, 0x00, 0x00, +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/pki/tx.proto", fileDescriptor_3047b9df3153e42a) +} + +var fileDescriptor_3047b9df3153e42a = []byte{ + // 1517 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x5a, 0xcd, 0x6f, 0xd4, 0x46, + 0x14, 0xc7, 0xfb, 0x91, 0x8f, 0x97, 0x10, 0x89, 0x69, 0x08, 0xc6, 0xd0, 0xdd, 0xc5, 0x20, 0x88, + 0xd4, 0x64, 0x37, 0x04, 0x36, 0x7c, 0xa8, 0xb4, 0x4a, 0xd8, 0x02, 0x11, 0x09, 0x4d, 0x1d, 0xa0, + 0x55, 0x55, 0x15, 0x79, 0xd7, 0x83, 0x99, 0xc6, 0xbb, 0x76, 0x6d, 0x6f, 0x44, 0xb8, 0xf5, 0x2f, + 0x28, 0x52, 0xd5, 0x6b, 0x6f, 0x6d, 0x25, 0xa4, 0x22, 0xaa, 0x7e, 0x5c, 0x7b, 0xed, 0xa5, 0x12, + 0xea, 0xa5, 0x3d, 0xad, 0x10, 0x5c, 0x2a, 0xf5, 0xb6, 0xf7, 0x4a, 0x95, 0x3f, 0x76, 0x1c, 0xef, + 0xda, 0xc9, 0xae, 0xd7, 0xa9, 0x20, 0xcd, 0x6d, 0x3d, 0x3b, 0xef, 0x37, 0x33, 0xef, 0xf7, 0x7b, + 0xe3, 0x79, 0x6f, 0x0c, 0xe7, 0x1e, 0x10, 0xb9, 0x8c, 0xb1, 0xa8, 0x28, 0x44, 0xac, 0x55, 0x70, + 0x41, 0x22, 0x86, 0xa9, 0x93, 0x72, 0xdd, 0xc4, 0x52, 0x45, 0xad, 0x6a, 0x4e, 0xab, 0x82, 0x25, + 0x19, 0xeb, 0x05, 0x6d, 0x8d, 0x14, 0xcc, 0xfb, 0x79, 0x4d, 0x57, 0x4d, 0x15, 0xe5, 0xfd, 0x86, + 0xf9, 0x2d, 0x0c, 0xf3, 0xda, 0x1a, 0xe1, 0x0e, 0x57, 0x54, 0xa3, 0xaa, 0x1a, 0x77, 0x6c, 0xeb, + 0x82, 0xf3, 0xe0, 0x40, 0x71, 0xe3, 0xb2, 0x2a, 0xab, 0x4e, 0xbb, 0xf5, 
0xcb, 0x69, 0xe5, 0x1f, + 0x27, 0xe1, 0xf0, 0xb2, 0x21, 0xaf, 0xe8, 0xaa, 0xa6, 0x1a, 0x78, 0x5e, 0x92, 0x3e, 0x28, 0xce, + 0x5c, 0x10, 0x54, 0xd5, 0xbc, 0x8c, 0x75, 0x13, 0x5d, 0x85, 0x01, 0x83, 0xc8, 0x35, 0xac, 0xb3, + 0x4c, 0x8e, 0x99, 0x1c, 0x5e, 0x28, 0x34, 0x1b, 0xd9, 0xd7, 0xd6, 0x45, 0x85, 0x48, 0xa2, 0x89, + 0x2f, 0xf2, 0x3a, 0xfe, 0xb4, 0x4e, 0x74, 0x2c, 0xf1, 0xbf, 0xff, 0x38, 0x3d, 0xee, 0x0e, 0x36, + 0x2f, 0x49, 0x3a, 0x36, 0x8c, 0x55, 0x53, 0x27, 0x35, 0x59, 0x70, 0xcd, 0xd1, 0x79, 0x48, 0x55, + 0xb0, 0x6e, 0xb2, 0x09, 0x1b, 0xe6, 0x44, 0xb3, 0x91, 0xcd, 0x75, 0xc2, 0x4c, 0x55, 0xc5, 0xfb, + 0x97, 0x4e, 0xcf, 0x9c, 0x3d, 0x5f, 0x3c, 0x37, 0x37, 0xc3, 0x0b, 0xb6, 0x05, 0x7a, 0x03, 0x52, + 0xa4, 0x76, 0x57, 0x65, 0x93, 0xb6, 0xe5, 0x21, 0xff, 0x04, 0x2c, 0x83, 0xb3, 0x33, 0x17, 0xe6, + 0x78, 0xc1, 0xee, 0x84, 0x10, 0xa4, 0x4c, 0x52, 0xc5, 0x6c, 0x2a, 0xc7, 0x4c, 0x26, 0x05, 0xfb, + 0x37, 0xba, 0x00, 0xc9, 0x75, 0x22, 0xb1, 0xe9, 0x1c, 0x33, 0x99, 0x5e, 0x38, 0xd5, 0x6c, 0x64, + 0x8f, 0x7b, 0xf6, 0xb2, 0x89, 0x2f, 0x9d, 0x9e, 0x52, 0x4c, 0x7c, 0x69, 0xae, 0x58, 0x3c, 0x53, + 0x9c, 0xa2, 0x0b, 0x12, 0x2c, 0x1b, 0xb4, 0x04, 0x07, 0xac, 0x39, 0xac, 0x56, 0xee, 0xe1, 0xaa, + 0x78, 0x1b, 0xeb, 0x06, 0x51, 0x6b, 0xec, 0x40, 0x8e, 0x99, 0xdc, 0xbf, 0x90, 0x69, 0x36, 0xb2, + 0x9c, 0x1f, 0x68, 0xc6, 0x03, 0xe2, 0x85, 0x4e, 0x43, 0x54, 0x82, 0xfd, 0x86, 0x0f, 0x69, 0xb0, + 0x2b, 0x24, 0xbf, 0x11, 0x7f, 0x1c, 0x8e, 0x85, 0xf2, 0x25, 0x60, 0x43, 0x53, 0x6b, 0x06, 0xe6, + 0xbf, 0x49, 0xd8, 0xac, 0xce, 0x6b, 0x9a, 0xae, 0xae, 0xef, 0x1c, 0xab, 0x17, 0x61, 0xd0, 0xa8, + 0x97, 0x3f, 0xc1, 0x95, 0x16, 0xb1, 0xb9, 0x66, 0x23, 0x7b, 0x34, 0x94, 0xd8, 0xd9, 0xb3, 0xbc, + 0xd0, 0x32, 0x40, 0x97, 0x61, 0xd4, 0xfd, 0x79, 0x1d, 0x6f, 0x2c, 0x4a, 0x2e, 0xbf, 0xd9, 0x66, + 0x23, 0x7b, 0x24, 0x04, 0x60, 0xb6, 0x38, 0xc7, 0x0b, 0x3e, 0x23, 0x2a, 0x8e, 0x54, 0x2f, 0xe2, + 0x48, 0x7b, 0xe2, 0x70, 0xbd, 0x19, 0xec, 0x27, 0xea, 0xcd, 0xbf, 0x12, 0x30, 0x66, 0xf5, 0x72, + 0xfe, 0xde, 
0x35, 0x81, 0x11, 0xa8, 0xee, 0x74, 0x6c, 0xea, 0x1e, 0x88, 0xa2, 0x6e, 0x16, 0x26, + 0xfc, 0x9e, 0xa6, 0x24, 0xfc, 0x94, 0x84, 0xa3, 0x9e, 0xf0, 0x05, 0xbc, 0xae, 0xae, 0xe1, 0x3d, + 0x55, 0x07, 0xaa, 0x1a, 0xf1, 0x30, 0x6a, 0x60, 0x9d, 0x88, 0xca, 0x8d, 0x7a, 0xb5, 0x8c, 0x75, + 0x9b, 0x8a, 0x61, 0xc1, 0xd7, 0x86, 0x72, 0x30, 0xa2, 0xdb, 0x4e, 0xbc, 0x7c, 0x8f, 0x28, 0x92, + 0xbd, 0x17, 0x0d, 0x09, 0x9b, 0x9b, 0x3a, 0x19, 0x1d, 0x8a, 0xc2, 0xe8, 0x49, 0x38, 0xb1, 0x15, + 0x6d, 0x94, 0xdf, 0xdf, 0x12, 0x36, 0xbf, 0x6e, 0x28, 0xfe, 0x2f, 0xf8, 0x4d, 0xf7, 0xc2, 0xef, + 0xc0, 0x16, 0xfc, 0x0e, 0x76, 0xf2, 0xeb, 0xfa, 0x3d, 0xd4, 0x9d, 0xd4, 0xef, 0x8f, 0x92, 0x70, + 0x60, 0xd9, 0x90, 0xbd, 0x1e, 0x7b, 0xc1, 0xf4, 0xf2, 0x06, 0xd3, 0x11, 0xfb, 0xb5, 0xee, 0xe7, + 0x8a, 0x32, 0xf9, 0x77, 0x02, 0x58, 0xfb, 0x5f, 0x6b, 0xf1, 0x7b, 0xef, 0xfc, 0x50, 0x42, 0xe3, + 0x79, 0x53, 0xf1, 0x90, 0x0b, 0x73, 0x36, 0x65, 0xe4, 0xf1, 0x80, 0x13, 0x84, 0x92, 0xb4, 0xb2, + 0x46, 0x2c, 0xd6, 0x2a, 0xa2, 0x49, 0xd4, 0x5a, 0xa9, 0x75, 0x88, 0x27, 0x6a, 0x6d, 0x45, 0x25, + 0xb5, 0x18, 0xd9, 0x71, 0x0f, 0xbb, 0x89, 0x08, 0x87, 0xdd, 0x19, 0x48, 0x6a, 0xc4, 0xe1, 0x24, + 0xbd, 0xad, 0x33, 0xac, 0xae, 0x68, 0x1c, 0xd2, 0xc4, 0x58, 0x99, 0x9f, 0xb7, 0xa9, 0x18, 0x12, + 0x9c, 0x07, 0x34, 0x0d, 0x69, 0x45, 0x2c, 0x63, 0x25, 0x78, 0x7b, 0xf3, 0x06, 0x75, 0x7a, 0xa1, + 0xeb, 0x30, 0x5e, 0xd1, 0x95, 0x55, 0x7b, 0xfa, 0x96, 0xf3, 0xc8, 0x5d, 0x52, 0x11, 0x4d, 0x67, + 0xbf, 0xdb, 0xc2, 0x3a, 0xd0, 0x08, 0x5d, 0x05, 0x44, 0x0c, 0xa3, 0x8e, 0xf5, 0x55, 0x4f, 0x31, + 0x25, 0x67, 0x7b, 0x0c, 0x87, 0x0a, 0x30, 0x41, 0x45, 0x18, 0x94, 0x44, 0x53, 0xbc, 0x25, 0x2c, + 0xd9, 0x81, 0x3a, 0xbc, 0x70, 0xa4, 0xd9, 0xc8, 0x1e, 0x0a, 0x10, 0x69, 0x5d, 0x57, 0x78, 0xa1, + 0xd5, 0xd7, 0xda, 0x2b, 0xac, 0x9f, 0x57, 0x88, 0x82, 0x57, 0xc9, 0x03, 0xcc, 0x0e, 0xe7, 0x98, + 0xc9, 0x94, 0xe0, 0x6b, 0x43, 0x19, 0x00, 0xeb, 0xb9, 0x44, 0x64, 0x6c, 0x98, 0x2c, 0xd8, 0xbb, + 0xc9, 0xa6, 0x16, 0x74, 0x12, 0xc6, 0xbc, 0xa7, 
0x9b, 0x1b, 0x1a, 0x66, 0x47, 0x2c, 0x7d, 0x0a, + 0x6d, 0xad, 0xe8, 0x6d, 0x18, 0xd3, 0xa9, 0xa4, 0xec, 0x7e, 0xa3, 0xb6, 0x8e, 0x43, 0xd7, 0xd9, + 0xd6, 0xbd, 0x33, 0x0e, 0xf6, 0x47, 0x88, 0x03, 0x74, 0x1d, 0x10, 0xa5, 0xa2, 0x84, 0x15, 0x2c, + 0x8b, 0xa6, 0xaa, 0xb3, 0x63, 0x41, 0x4e, 0xf3, 0x9f, 0x62, 0x03, 0xcc, 0xf8, 0x3c, 0x4c, 0x75, + 0x13, 0x2f, 0x34, 0xc0, 0x1e, 0xa6, 0xe1, 0xd4, 0xb2, 0x21, 0xdf, 0xd2, 0xac, 0x01, 0x5e, 0x81, + 0x18, 0xa3, 0xb1, 0x91, 0xec, 0x2a, 0x36, 0x66, 0x43, 0x62, 0xc3, 0xde, 0xfa, 0x7a, 0x0a, 0x81, + 0x74, 0xef, 0x21, 0x30, 0xe7, 0x85, 0x80, 0x13, 0x8b, 0x47, 0x9b, 0x8d, 0x2c, 0xeb, 0x59, 0xab, + 0x55, 0x62, 0xe2, 0xaa, 0x66, 0x6e, 0x6c, 0x13, 0x03, 0x83, 0xdb, 0xc6, 0xc0, 0x50, 0x17, 0x31, + 0x30, 0x1c, 0x18, 0x03, 0x1d, 0x12, 0x86, 0xf8, 0x24, 0x3c, 0x12, 0x4d, 0xc2, 0xa7, 0xa1, 0xd0, + 0xa5, 0x22, 0xa9, 0x8a, 0xbf, 0x4e, 0xd8, 0x2a, 0xb6, 0x30, 0x76, 0xa3, 0x8a, 0x83, 0x15, 0x99, + 0xea, 0x59, 0x91, 0xae, 0x6b, 0xbb, 0x71, 0x13, 0x75, 0xed, 0x17, 0x09, 0x18, 0xb5, 0x76, 0x14, + 0xc3, 0x5a, 0xf5, 0x6d, 0x22, 0xed, 0xa2, 0x73, 0x90, 0x4b, 0x60, 0xaa, 0x77, 0x02, 0xf9, 0x09, + 0x18, 0xdf, 0xec, 0x14, 0xef, 0xbc, 0x92, 0x80, 0x83, 0xce, 0xfe, 0x7b, 0x43, 0xad, 0xbc, 0x6c, + 0x85, 0xc0, 0xc0, 0x72, 0x45, 0x2a, 0xb6, 0x72, 0x45, 0x3a, 0xca, 0x21, 0x30, 0x0b, 0xaf, 0x07, + 0xfa, 0x8b, 0x7a, 0xf4, 0xb3, 0x84, 0x9b, 0x5d, 0x55, 0xd5, 0xf5, 0xdd, 0x98, 0x5d, 0xb5, 0x27, + 0x47, 0xa9, 0x80, 0x4c, 0xb4, 0x95, 0xb4, 0x6c, 0x76, 0x01, 0x75, 0xd0, 0x77, 0x09, 0x47, 0x8b, + 0xd4, 0x85, 0x8b, 0x15, 0xf1, 0xa5, 0x56, 0x5c, 0x32, 0x36, 0xc5, 0xa5, 0xa2, 0x28, 0x2e, 0xe3, + 0x54, 0x49, 0xda, 0xdd, 0x45, 0xfd, 0xf9, 0x7d, 0xd2, 0x4d, 0x02, 0xad, 0x14, 0x71, 0xc7, 0xa2, + 0xf8, 0x55, 0xd0, 0x5d, 0xff, 0x65, 0x96, 0xff, 0x2a, 0xab, 0x6f, 0xa5, 0x92, 0x01, 0x94, 0x51, + 0x5e, 0x9f, 0x24, 0xe1, 0x50, 0x7b, 0xa7, 0xd8, 0x43, 0x65, 0x8f, 0xd6, 0x38, 0x69, 0x3d, 0x06, + 0xd9, 0x10, 0xc6, 0x5a, 0xac, 0xce, 0xfe, 0x33, 0x01, 0xc9, 0x65, 0x43, 0x46, 0x3f, 
0x33, 0x30, + 0x11, 0x72, 0x05, 0xb7, 0xd8, 0xe3, 0x15, 0x60, 0x3e, 0xf4, 0x76, 0x88, 0x7b, 0x2f, 0x36, 0xa8, + 0xd6, 0x02, 0xec, 0x89, 0x87, 0xdc, 0x32, 0x45, 0x99, 0x78, 0x30, 0x54, 0xa4, 0x89, 0x6f, 0x7d, + 0xa7, 0x83, 0xbe, 0x64, 0x60, 0x64, 0xf3, 0x85, 0xce, 0x5b, 0x51, 0x86, 0xf0, 0xec, 0xb9, 0x2b, + 0xfd, 0xd9, 0xd3, 0x79, 0xfd, 0xc2, 0xc0, 0xe1, 0xf0, 0x3b, 0x8e, 0xa5, 0xe8, 0x0c, 0x76, 0xa2, + 0x71, 0x37, 0xe3, 0x44, 0xf3, 0xad, 0x20, 0xbc, 0x8a, 0xbf, 0x14, 0x9d, 0xca, 0x98, 0x56, 0xb0, + 0x6d, 0x49, 0x1c, 0x7d, 0xc5, 0xc0, 0x58, 0x5b, 0x3d, 0x7c, 0x3e, 0xc2, 0x40, 0x7e, 0x08, 0x6e, + 0xb1, 0x6f, 0x08, 0x3a, 0xc1, 0x1f, 0x18, 0x38, 0x18, 0x5c, 0xe6, 0xbd, 0x16, 0x69, 0x90, 0x00, + 0x24, 0x6e, 0x25, 0x2e, 0x24, 0x3a, 0xeb, 0x3f, 0x18, 0x38, 0xb6, 0x7d, 0x29, 0xf4, 0x66, 0xb4, + 0x40, 0xda, 0x1a, 0x95, 0xfb, 0x68, 0x27, 0x50, 0xe9, 0xca, 0x9e, 0x31, 0x70, 0xa2, 0xab, 0x1a, + 0xd4, 0xfb, 0x11, 0xa6, 0xd1, 0x0d, 0x30, 0x77, 0x67, 0x87, 0x80, 0x7d, 0x4b, 0xec, 0xaa, 0x40, + 0x11, 0x65, 0x89, 0xdd, 0x00, 0x47, 0x5a, 0x62, 0x2f, 0xb5, 0x02, 0xf4, 0x39, 0x03, 0xc3, 0x5e, + 0xa1, 0xe0, 0xcd, 0x28, 0x8a, 0x69, 0x59, 0x73, 0xa5, 0x7e, 0xac, 0xe9, 0x8c, 0x1e, 0x31, 0x80, + 0x02, 0x92, 0xf1, 0x77, 0xa2, 0x89, 0xb9, 0x0d, 0x86, 0x5b, 0x8e, 0x05, 0xa6, 0x6d, 0xd7, 0xf4, + 0xe5, 0xb9, 0xd1, 0x76, 0xcd, 0xcd, 0x10, 0x11, 0x77, 0xcd, 0xa0, 0x54, 0x13, 0x7d, 0xcb, 0xc0, + 0x81, 0xce, 0x3c, 0xb3, 0xd4, 0x97, 0x17, 0x5c, 0x14, 0x6e, 0x29, 0x0e, 0x94, 0xb6, 0xfd, 0x3d, + 0x28, 0x83, 0xbb, 0x16, 0xf9, 0x25, 0xd2, 0xce, 0xfe, 0x4a, 0x5c, 0x48, 0x74, 0xd6, 0x4f, 0x18, + 0x18, 0x0f, 0xce, 0x4f, 0xfa, 0x1d, 0xaa, 0xe5, 0xe5, 0x77, 0x63, 0x02, 0x6a, 0x4d, 0x79, 0xe1, + 0xe3, 0x5f, 0x9f, 0x67, 0x98, 0xa7, 0xcf, 0x33, 0xcc, 0xb3, 0xe7, 0x19, 0xe6, 0xe1, 0x8b, 0xcc, + 0xbe, 0xa7, 0x2f, 0x32, 0xfb, 0xfe, 0x7c, 0x91, 0xd9, 0xf7, 0x61, 0x49, 0x26, 0xe6, 0xbd, 0x7a, + 0x39, 0x5f, 0x51, 0xab, 0x05, 0x67, 0xd0, 0xe9, 0xa0, 0xaf, 0xf7, 0xa6, 0xbd, 0x61, 0xa7, 0xdd, + 0xef, 0xf7, 0xee, 0x3b, 
0x5f, 0xf0, 0x6d, 0x68, 0xd8, 0x28, 0x0f, 0xd8, 0x1f, 0xd9, 0x9d, 0xf9, + 0x37, 0x00, 0x00, 0xff, 0xff, 0x33, 0xd2, 0xda, 0xfb, 0x00, 0x28, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -2806,7 +2808,7 @@ var _Msg_serviceDesc = grpc.ServiceDesc{ }, }, Streams: []grpc.StreamDesc{}, - Metadata: "pki/tx.proto", + Metadata: "zigbeealliance/distributedcomplianceledger/pki/tx.proto", } func (m *MsgProposeAddX509RootCert) Marshal() (dAtA []byte, err error) { diff --git a/x/pki/types/unique_certificate.pb.go b/x/pki/types/unique_certificate.pb.go index abf679f97..69a745880 100644 --- a/x/pki/types/unique_certificate.pb.go +++ b/x/pki/types/unique_certificate.pb.go @@ -1,11 +1,11 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: pki/unique_certificate.proto +// source: zigbeealliance/distributedcomplianceledger/pki/unique_certificate.proto package types import ( fmt "fmt" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -32,7 +32,7 @@ func (m *UniqueCertificate) Reset() { *m = UniqueCertificate{} } func (m *UniqueCertificate) String() string { return proto.CompactTextString(m) } func (*UniqueCertificate) ProtoMessage() {} func (*UniqueCertificate) Descriptor() ([]byte, []int) { - return fileDescriptor_13bd02dd73fed7e6, []int{0} + return fileDescriptor_59ed78d3487c5a69, []int{0} } func (m *UniqueCertificate) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -86,25 +86,28 @@ func init() { proto.RegisterType((*UniqueCertificate)(nil), "zigbeealliance.distributedcomplianceledger.pki.UniqueCertificate") } -func init() { proto.RegisterFile("pki/unique_certificate.proto", fileDescriptor_13bd02dd73fed7e6) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/pki/unique_certificate.proto", fileDescriptor_59ed78d3487c5a69) +} -var fileDescriptor_13bd02dd73fed7e6 = []byte{ - // 239 bytes of a gzipped 
FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x90, 0xbf, 0x4e, 0xc3, 0x30, - 0x10, 0xc6, 0x63, 0x90, 0x0a, 0x58, 0x2c, 0x64, 0x40, 0x19, 0x90, 0x55, 0x75, 0xea, 0x12, 0x7b, - 0xe0, 0x0d, 0x80, 0x99, 0xa1, 0x12, 0x0b, 0x03, 0x28, 0x76, 0x8e, 0x70, 0x6a, 0xfe, 0x98, 0xf3, - 0x59, 0x02, 0x9e, 0x82, 0xc7, 0x62, 0xec, 0xc8, 0x88, 0x92, 0x17, 0x41, 0x24, 0x54, 0xa5, 0xe3, - 0xf7, 0x9d, 0xf4, 0xe9, 0x7e, 0x3f, 0x79, 0xe1, 0xd7, 0x68, 0x62, 0x8b, 0x2f, 0x11, 0x1e, 0x1d, - 0x10, 0xe3, 0x13, 0xba, 0x82, 0x41, 0x7b, 0xea, 0xb8, 0x4b, 0xf5, 0x3b, 0x56, 0x16, 0xa0, 0xa8, - 0x6b, 0x2c, 0x5a, 0x07, 0xba, 0xc4, 0xc0, 0x84, 0x36, 0x32, 0x94, 0xae, 0x6b, 0xfc, 0xd4, 0xd6, - 0x50, 0x56, 0x40, 0xda, 0xaf, 0x71, 0x81, 0xf2, 0xec, 0x6e, 0xdc, 0xba, 0xde, 0x4d, 0xa5, 0xe7, - 0x72, 0x86, 0x21, 0x44, 0xa0, 0x4c, 0xcc, 0xc5, 0xf2, 0x64, 0xf5, 0x97, 0xd2, 0x85, 0x3c, 0x0d, - 0x40, 0x58, 0xd4, 0xb7, 0xb1, 0xb1, 0x40, 0xd9, 0xc1, 0x78, 0xdd, 0xeb, 0xd2, 0x4c, 0x1e, 0x79, - 0x82, 0x00, 0x2d, 0x67, 0x87, 0x73, 0xb1, 0x3c, 0x5e, 0x6d, 0xe3, 0xd5, 0xc3, 0x67, 0xaf, 0xc4, - 0xa6, 0x57, 0xe2, 0xbb, 0x57, 0xe2, 0x63, 0x50, 0xc9, 0x66, 0x50, 0xc9, 0xd7, 0xa0, 0x92, 0xfb, - 0x9b, 0x0a, 0xf9, 0x39, 0x5a, 0xed, 0xba, 0xc6, 0x4c, 0xff, 0xe7, 0x5b, 0x00, 0xf3, 0x0f, 0x20, - 0xdf, 0x11, 0xe4, 0x13, 0x82, 0x79, 0x35, 0xbf, 0x36, 0xf8, 0xcd, 0x43, 0xb0, 0xb3, 0xd1, 0xc0, - 0xe5, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x97, 0x5d, 0xbe, 0x96, 0x21, 0x01, 0x00, 0x00, +var fileDescriptor_59ed78d3487c5a69 = []byte{ + // 242 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xbf, 0x4a, 0xc4, 0x40, + 0x10, 0xc6, 0xb3, 0x0a, 0xa7, 0x2e, 0x36, 0xa6, 0x90, 0x54, 0xcb, 0x71, 0xd5, 0x35, 0xd9, 0x14, + 0xbe, 0x81, 0x0a, 0x76, 0x16, 0x07, 0x36, 0x16, 0x4a, 0x76, 0x33, 0xc6, 0xe1, 0xf2, 0x67, 0x9d, + 0x9d, 0x05, 0xf5, 0x29, 0x7c, 0x2c, 0xcb, 0x2b, 0x2d, 0x25, 0x79, 0x11, 0x61, 0xef, 0xe4, 0x08, + 0x88, 0xe5, 0xf7, 0x0d, 
0xfc, 0x98, 0xef, 0x27, 0x6f, 0xde, 0xb1, 0x36, 0x00, 0x65, 0xd3, 0x60, + 0xd9, 0x59, 0x28, 0x2a, 0xf4, 0x4c, 0x68, 0x02, 0x43, 0x65, 0xfb, 0xd6, 0x6d, 0xdb, 0x06, 0xaa, + 0x1a, 0xa8, 0x70, 0x6b, 0x2c, 0x42, 0x87, 0x2f, 0x01, 0x1e, 0x2d, 0x10, 0xe3, 0x13, 0xda, 0x92, + 0x41, 0x3b, 0xea, 0xb9, 0x4f, 0xf5, 0x14, 0xa4, 0xff, 0x01, 0x69, 0xb7, 0xc6, 0x05, 0xca, 0xb3, + 0xbb, 0xc8, 0xba, 0xda, 0xa3, 0xd2, 0x73, 0x39, 0x43, 0xef, 0x03, 0x50, 0x26, 0xe6, 0x62, 0x79, + 0xb2, 0xda, 0xa5, 0x74, 0x21, 0x4f, 0x3d, 0x10, 0x96, 0xcd, 0x6d, 0x68, 0x0d, 0x50, 0x76, 0x10, + 0xaf, 0x93, 0x2e, 0xcd, 0xe4, 0x91, 0x23, 0xf0, 0xd0, 0x71, 0x76, 0x38, 0x17, 0xcb, 0xe3, 0xd5, + 0x6f, 0xbc, 0x7c, 0xf8, 0x1c, 0x94, 0xd8, 0x0c, 0x4a, 0x7c, 0x0f, 0x4a, 0x7c, 0x8c, 0x2a, 0xd9, + 0x8c, 0x2a, 0xf9, 0x1a, 0x55, 0x72, 0x7f, 0x5d, 0x23, 0x3f, 0x07, 0xa3, 0x6d, 0xdf, 0x16, 0xdb, + 0xff, 0xf3, 0xbf, 0x4c, 0xe4, 0xfb, 0x05, 0xf9, 0xce, 0xc5, 0x6b, 0xb4, 0xc1, 0x6f, 0x0e, 0xbc, + 0x99, 0x45, 0x03, 0x17, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xf8, 0x2c, 0x89, 0x85, 0x4c, 0x01, + 0x00, 0x00, } func (m *UniqueCertificate) Marshal() (dAtA []byte, err error) { diff --git a/x/validator/abci.go b/x/validator/abci.go index 4657b53b2..dc03df316 100644 --- a/x/validator/abci.go +++ b/x/validator/abci.go @@ -4,10 +4,10 @@ import ( "fmt" "time" + abci "github.com/cometbft/cometbft/abci/types" "github.com/cosmos/cosmos-sdk/telemetry" sdk "github.com/cosmos/cosmos-sdk/types" evidencetypes "github.com/cosmos/cosmos-sdk/x/evidence/types" - abci "github.com/tendermint/tendermint/abci/types" "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/types" ) @@ -19,7 +19,7 @@ func BeginBlocker(ctx sdk.Context, req abci.RequestBeginBlock, k keeper.Keeper) // (double signing, light client attack, etc.) 
for _, tmEvidence := range req.ByzantineValidators { switch tmEvidence.Type { - case abci.EvidenceType_DUPLICATE_VOTE, abci.EvidenceType_LIGHT_CLIENT_ATTACK: + case abci.MisbehaviorType_DUPLICATE_VOTE, abci.MisbehaviorType_LIGHT_CLIENT_ATTACK: //nolint:nosnakecase evidence := evidencetypes.FromABCIEvidence(tmEvidence) k.HandleDoubleSign(ctx, evidence.(*evidencetypes.Equivocation)) default: diff --git a/x/validator/client/cli/query.go b/x/validator/client/cli/query.go index 1130dfcc1..6cd4a6914 100644 --- a/x/validator/client/cli/query.go +++ b/x/validator/client/cli/query.go @@ -13,7 +13,7 @@ import ( ) // GetQueryCmd returns the cli query commands for this module. -func GetQueryCmd(queryRoute string) *cobra.Command { +func GetQueryCmd(_ string) *cobra.Command { // Group validator queries under a subcommand cmd := &cobra.Command{ Use: types.ModuleName, diff --git a/x/validator/client/cli/query_disabled_validator.go b/x/validator/client/cli/query_disabled_validator.go index 5b7d50836..18b2e31ab 100644 --- a/x/validator/client/cli/query_disabled_validator.go +++ b/x/validator/client/cli/query_disabled_validator.go @@ -16,7 +16,10 @@ func CmdListDisabledValidator() *cobra.Command { Use: "all-disabled-nodes", Short: "Query the list of all disabled validators", RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } pageReq, err := client.ReadPageRequest(cmd.Flags()) if err != nil { @@ -55,7 +58,10 @@ func CmdShowDisabledValidator() *cobra.Command { Short: "Query disabled validator by address", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } var res types.DisabledValidator diff --git a/x/validator/client/cli/query_disabled_validator_test.go 
b/x/validator/client/cli/query_disabled_validator_test.go index 81445cc61..a90c36e24 100644 --- a/x/validator/client/cli/query_disabled_validator_test.go +++ b/x/validator/client/cli/query_disabled_validator_test.go @@ -5,10 +5,10 @@ import ( "strconv" "testing" + tmcli "github.com/cometbft/cometbft/libs/cli" "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" cliutils "github.com/zigbee-alliance/distributed-compliance-ledger/utils/cli" diff --git a/x/validator/client/cli/query_last_validator_power.go b/x/validator/client/cli/query_last_validator_power.go index 3cfec250c..1a09868b7 100644 --- a/x/validator/client/cli/query_last_validator_power.go +++ b/x/validator/client/cli/query_last_validator_power.go @@ -17,7 +17,10 @@ func CmdListLastValidatorPower() *cobra.Command { Use: "all-last-powers", Short: "list all LastValidatorPower", RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } pageReq, err := client.ReadPageRequest(cmd.Flags()) if err != nil { @@ -54,7 +57,10 @@ func CmdShowLastValidatorPower() *cobra.Command { Short: "shows a LastValidatorPower", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } addr, err := sdk.ValAddressFromBech32(viper.GetString(FlagAddress)) if err != nil { diff --git a/x/validator/client/cli/query_last_validator_power_test.go b/x/validator/client/cli/query_last_validator_power_test.go index a41e3eca9..8b0854dd9 100644 --- a/x/validator/client/cli/query_last_validator_power_test.go +++ 
b/x/validator/client/cli/query_last_validator_power_test.go @@ -9,7 +9,7 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/client/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/types" diff --git a/x/validator/client/cli/query_proposed_disable_validator.go b/x/validator/client/cli/query_proposed_disable_validator.go index 69d384a54..350828fe4 100644 --- a/x/validator/client/cli/query_proposed_disable_validator.go +++ b/x/validator/client/cli/query_proposed_disable_validator.go @@ -16,7 +16,10 @@ func CmdListProposedDisableValidator() *cobra.Command { Use: "all-proposed-disable-nodes", Short: "Query the list of all proposed disable validators", RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } pageReq, err := client.ReadPageRequest(cmd.Flags()) if err != nil { @@ -55,7 +58,10 @@ func CmdShowProposedDisableValidator() *cobra.Command { Short: "Query proposed disable validator by address", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } var res types.ProposedDisableValidator diff --git a/x/validator/client/cli/query_proposed_disable_validator_test.go b/x/validator/client/cli/query_proposed_disable_validator_test.go index fdad1b67f..935e10eeb 100644 --- a/x/validator/client/cli/query_proposed_disable_validator_test.go +++ b/x/validator/client/cli/query_proposed_disable_validator_test.go 
@@ -9,7 +9,7 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" diff --git a/x/validator/client/cli/query_rejected_node.go b/x/validator/client/cli/query_rejected_node.go index d7fddc962..5bb41249e 100644 --- a/x/validator/client/cli/query_rejected_node.go +++ b/x/validator/client/cli/query_rejected_node.go @@ -17,7 +17,10 @@ func CmdListRejectedNode() *cobra.Command { Use: "all-rejected-disable-nodes", Short: "Query the list of all rejected disable validators", RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } pageReq, err := client.ReadPageRequest(cmd.Flags()) if err != nil { @@ -51,7 +54,10 @@ func CmdShowRejectedNode() *cobra.Command { Short: "Query rejected disable validator by address", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } addr, err := sdk.ValAddressFromBech32(viper.GetString(FlagAddress)) if err != nil { diff --git a/x/validator/client/cli/query_rejected_node_test.go b/x/validator/client/cli/query_rejected_node_test.go index 56274b8c8..0032ad807 100644 --- a/x/validator/client/cli/query_rejected_node_test.go +++ b/x/validator/client/cli/query_rejected_node_test.go @@ -9,7 +9,7 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli 
"github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" diff --git a/x/validator/client/cli/query_validator.go b/x/validator/client/cli/query_validator.go index 04fdf5d5d..548e37c56 100644 --- a/x/validator/client/cli/query_validator.go +++ b/x/validator/client/cli/query_validator.go @@ -57,7 +57,10 @@ func CmdShowValidator() *cobra.Command { Short: "shows a Validator", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } addr, err := sdk.ValAddressFromBech32(viper.GetString(FlagAddress)) if err != nil { diff --git a/x/validator/client/cli/query_validator_test.go b/x/validator/client/cli/query_validator_test.go index 3fe3791bc..414c4b25a 100644 --- a/x/validator/client/cli/query_validator_test.go +++ b/x/validator/client/cli/query_validator_test.go @@ -9,7 +9,7 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/client/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/types" diff --git a/x/validator/client/cli/tx_create_validator.go b/x/validator/client/cli/tx_create_validator.go index 3251b8091..271bcaf5f 100644 --- a/x/validator/client/cli/tx_create_validator.go +++ b/x/validator/client/cli/tx_create_validator.go @@ -28,8 +28,10 @@ func CmdCreateValidator() *cobra.Command { return err } - txf := tx.NewFactoryCLI(clientCtx, cmd.Flags()). 
- WithTxConfig(clientCtx.TxConfig).WithAccountRetriever(clientCtx.AccountRetriever) + txf, err := tx.NewFactoryCLI(clientCtx, cmd.Flags()) + if err != nil { + return err + } _, msg, err := newBuildCreateValidatorMsg(clientCtx, txf, cmd.Flags()) if err != nil { return err diff --git a/x/validator/genesis.go b/x/validator/genesis.go index 2aec9b691..5c85dac0e 100644 --- a/x/validator/genesis.go +++ b/x/validator/genesis.go @@ -1,10 +1,10 @@ package validator import ( + abci "github.com/cometbft/cometbft/abci/types" + tmtypes "github.com/cometbft/cometbft/types" cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec" sdk "github.com/cosmos/cosmos-sdk/types" - abci "github.com/tendermint/tendermint/abci/types" - tmtypes "github.com/tendermint/tendermint/types" "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/types" ) diff --git a/x/validator/handler.go b/x/validator/handler.go index f33e21735..fe5e307e3 100644 --- a/x/validator/handler.go +++ b/x/validator/handler.go @@ -3,6 +3,7 @@ package validator import ( "fmt" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/keeper" @@ -45,7 +46,7 @@ func NewHandler(k keeper.Keeper) sdk.Handler { default: errMsg := fmt.Sprintf("unrecognized %s message type: %T", types.ModuleName, msg) - return nil, sdkerrors.Wrap(sdkerrors.ErrUnknownRequest, errMsg) + return nil, errors.Wrap(sdkerrors.ErrUnknownRequest, errMsg) } } } diff --git a/x/validator/keeper/keeper.go b/x/validator/keeper/keeper.go index 584a6430e..fc7577875 100644 --- a/x/validator/keeper/keeper.go +++ b/x/validator/keeper/keeper.go @@ -4,17 +4,18 @@ import ( "fmt" "math" + "github.com/cometbft/cometbft/libs/log" "github.com/cosmos/cosmos-sdk/codec" + storetypes "github.com/cosmos/cosmos-sdk/store/types" sdk 
"github.com/cosmos/cosmos-sdk/types" - "github.com/tendermint/tendermint/libs/log" "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/types" ) type ( Keeper struct { cdc codec.BinaryCodec - storeKey sdk.StoreKey - memKey sdk.StoreKey + storeKey storetypes.StoreKey + memKey storetypes.StoreKey dclauthKeeper types.DclauthKeeper } @@ -23,7 +24,7 @@ type ( func NewKeeper( cdc codec.BinaryCodec, storeKey, - memKey sdk.StoreKey, + memKey storetypes.StoreKey, dclauthKeeper types.DclauthKeeper, ) *Keeper { diff --git a/x/validator/keeper/msg_server_approve_disable_validator.go b/x/validator/keeper/msg_server_approve_disable_validator.go index c08ed07c0..62ad43365 100644 --- a/x/validator/keeper/msg_server_approve_disable_validator.go +++ b/x/validator/keeper/msg_server_approve_disable_validator.go @@ -3,6 +3,7 @@ package keeper import ( "context" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/types" @@ -13,17 +14,17 @@ func (k msgServer) ApproveDisableValidator(goCtx context.Context, msg *types.Msg creatorAddr, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid creator address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid creator address: (%s)", err) } validatorAddr, err := sdk.ValAddressFromBech32(msg.Address) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid validator address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid validator address: (%s)", err) } // check if message creator has enough rights to approve disable validator if !k.dclauthKeeper.HasRole(ctx, creatorAddr, types.VoteForDisableValidatorRole) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, 
"MsgApproveDisableValidator transaction should be signed by an account with the %s role", types.VoteForDisableValidatorRole, ) @@ -37,7 +38,7 @@ func (k msgServer) ApproveDisableValidator(goCtx context.Context, msg *types.Msg // check if disable validator already has approval form message creator if proposedDisableValidator.HasApprovalFrom(creatorAddr) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "Disabled validator with address=%v already has approval from=%v", msg.Address, msg.Creator, ) diff --git a/x/validator/keeper/msg_server_create_validator.go b/x/validator/keeper/msg_server_create_validator.go index ffb1c6755..a2fc9c8bd 100644 --- a/x/validator/keeper/msg_server_create_validator.go +++ b/x/validator/keeper/msg_server_create_validator.go @@ -3,11 +3,12 @@ package keeper import ( "context" + "cosmossdk.io/errors" + tmstrings "github.com/cometbft/cometbft/libs/strings" cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" sdkstakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" - tmstrings "github.com/tendermint/tendermint/libs/strings" dclauthtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/types" ) @@ -22,7 +23,7 @@ func (k msgServer) CreateValidator(goCtx context.Context, msg *types.MsgCreateVa // check if sender has enough rights to create a validator node if !k.dclauthKeeper.HasRole(ctx, sdk.AccAddress(valAddr), dclauthtypes.NodeAdmin) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "CreateValidator transaction should be signed by an account with the \"%s\" role", types.EnableDisableValidatorRole, ) @@ -40,7 +41,7 @@ func (k msgServer) CreateValidator(goCtx context.Context, msg *types.MsgCreateVa pk, ok := 
msg.PubKey.GetCachedValue().(cryptotypes.PubKey) if !ok { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidType, "Expecting cryptotypes.PubKey, got %T", pk) + return nil, errors.Wrapf(sdkerrors.ErrInvalidType, "Expecting cryptotypes.PubKey, got %T", pk) } if _, found := k.GetValidatorByConsAddr(ctx, sdk.GetConsAddress(pk)); found { @@ -55,7 +56,7 @@ func (k msgServer) CreateValidator(goCtx context.Context, msg *types.MsgCreateVa cp := ctx.ConsensusParams() if cp != nil && cp.Validator != nil { if !tmstrings.StringInSlice(pk.Type(), cp.Validator.PubKeyTypes) { - return nil, sdkerrors.Wrapf( + return nil, errors.Wrapf( sdkstakingtypes.ErrValidatorPubKeyTypeNotSupported, "got: %s, expected: %s", pk.Type(), cp.Validator.PubKeyTypes, ) diff --git a/x/validator/keeper/msg_server_disable_validator.go b/x/validator/keeper/msg_server_disable_validator.go index 917c0ce82..40e2b3d83 100644 --- a/x/validator/keeper/msg_server_disable_validator.go +++ b/x/validator/keeper/msg_server_disable_validator.go @@ -3,6 +3,7 @@ package keeper import ( "context" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" sdkstakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" @@ -15,12 +16,12 @@ func (k msgServer) DisableValidator(goCtx context.Context, msg *types.MsgDisable creatorAddr, err := sdk.ValAddressFromBech32(msg.Creator) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } // check if message creator has enough rights to propose disable validator if !k.dclauthKeeper.HasRole(ctx, sdk.AccAddress(creatorAddr), types.EnableDisableValidatorRole) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "Disable validator transaction should be signed by an account with the %s role", types.EnableDisableValidatorRole, ) diff 
--git a/x/validator/keeper/msg_server_enable_validator.go b/x/validator/keeper/msg_server_enable_validator.go index febe24a7c..f01251be1 100644 --- a/x/validator/keeper/msg_server_enable_validator.go +++ b/x/validator/keeper/msg_server_enable_validator.go @@ -3,6 +3,7 @@ package keeper import ( "context" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/types" @@ -13,12 +14,12 @@ func (k msgServer) EnableValidator(goCtx context.Context, msg *types.MsgEnableVa creatorAddr, err := sdk.ValAddressFromBech32(msg.Creator) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err) } // check if message creator has enough rights to propose disable validator if !k.dclauthKeeper.HasRole(ctx, sdk.AccAddress(creatorAddr), types.EnableDisableValidatorRole) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "Enable validator transaction should be signed by an account with the %s role", types.EnableDisableValidatorRole, ) diff --git a/x/validator/keeper/msg_server_propose_disable_validator.go b/x/validator/keeper/msg_server_propose_disable_validator.go index b8f6d0c36..a2d89f6c3 100644 --- a/x/validator/keeper/msg_server_propose_disable_validator.go +++ b/x/validator/keeper/msg_server_propose_disable_validator.go @@ -3,6 +3,7 @@ package keeper import ( "context" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" sdkstakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" @@ -14,17 +15,17 @@ func (k msgServer) ProposeDisableValidator(goCtx context.Context, msg *types.Msg creatorAddr, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return nil, 
sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid creator address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid creator address: (%s)", err) } validatorAddr, err := sdk.ValAddressFromBech32(msg.Address) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid validator address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid validator address: (%s)", err) } // check if message creator has enough rights to propose disable validator if !k.dclauthKeeper.HasRole(ctx, creatorAddr, types.VoteForDisableValidatorRole) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "MsgProposeDisableValidator transaction should be signed by an account with the %s role", types.VoteForDisableValidatorRole, ) diff --git a/x/validator/keeper/msg_server_reject_disable_node.go b/x/validator/keeper/msg_server_reject_disable_node.go index 1b19651d3..d19262a33 100644 --- a/x/validator/keeper/msg_server_reject_disable_node.go +++ b/x/validator/keeper/msg_server_reject_disable_node.go @@ -3,6 +3,7 @@ package keeper import ( "context" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/types" @@ -13,17 +14,17 @@ func (k msgServer) RejectDisableValidator(goCtx context.Context, msg *types.MsgR creatorAddr, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid creator address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid creator address: (%s)", err) } validatorAddr, err := sdk.ValAddressFromBech32(msg.Address) if err != nil { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid validator address: (%s)", err) + return nil, errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid validator address: 
(%s)", err) } // check if message creator has enough rights to reject disable validator if !k.dclauthKeeper.HasRole(ctx, creatorAddr, types.VoteForDisableValidatorRole) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "MsgRejectDisableValidator transaction should be signed by an account with the %s role", types.VoteForDisableValidatorRole, ) @@ -37,7 +38,7 @@ func (k msgServer) RejectDisableValidator(goCtx context.Context, msg *types.MsgR // check if disable validator already has reject from message creator if proposedDisableValidator.HasRejectDisableFrom(creatorAddr) { - return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, + return nil, errors.Wrapf(sdkerrors.ErrUnauthorized, "Disabled validator with address=%v already has reject from=%v", msg.Address, msg.Creator, diff --git a/x/validator/keeper/process_malicious.go b/x/validator/keeper/process_malicious.go index 996766dec..b33cc47e1 100644 --- a/x/validator/keeper/process_malicious.go +++ b/x/validator/keeper/process_malicious.go @@ -3,6 +3,7 @@ package keeper import ( "fmt" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" evidencetypes "github.com/cosmos/cosmos-sdk/x/evidence/types" @@ -78,7 +79,7 @@ func (k Keeper) HandleDoubleSign(ctx sdk.Context, evidence *evidencetypes.Equivo sdk.NewEvent( slashingtypes.EventTypeSlash, sdk.NewAttribute(slashingtypes.AttributeKeyAddress, consAddr.String()), - sdk.NewAttribute(slashingtypes.AttributeKeyPower, fmt.Sprintf("%d", evidence.GetValidatorPower())), + sdk.NewAttribute(slashingtypes.AttributeKeyPower, fmt.Sprintf("%d", evidence.GetValidatorPower())), //nolint:perfsprint sdk.NewAttribute(slashingtypes.AttributeKeyReason, slashingtypes.AttributeValueDoubleSign), sdk.NewAttribute(slashingtypes.AttributeKeyJailed, consAddr.String()), ), @@ -91,14 +92,14 @@ func (k Keeper) HandleDoubleSign(ctx sdk.Context, evidence *evidencetypes.Equivo // 
Revoked Account valAddr, err := sdk.ValAddressFromBech32(validator.Owner) if err != nil { - logger.Info("Error:", sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err)) + logger.Info("Error:", errors.Wrapf(sdkerrors.ErrInvalidAddress, "Invalid Address: (%s)", err)) } accAddr := sdk.AccAddress(valAddr) // Move account to entity revoked account revokedAccount, err := k.dclauthKeeper.AddAccountToRevokedAccount( - ctx, accAddr, nil, dclauthTypes.RevokedAccount_MaliciousValidator) + ctx, accAddr, nil, dclauthTypes.RevokedAccount_MaliciousValidator) //nolint:nosnakecase if err != nil { logger.Info("Error:", err) } else { diff --git a/x/validator/keeper/process_malicious_test.go b/x/validator/keeper/process_malicious_test.go index 8b6a60114..8704bc8a4 100644 --- a/x/validator/keeper/process_malicious_test.go +++ b/x/validator/keeper/process_malicious_test.go @@ -9,8 +9,8 @@ package keeper_test // slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types" // "github.com/stretchr/testify/require" -// // abci "github.com/tendermint/tendermint/abci/types" -// tmproto "github.com/tendermint/tendermint/proto/tendermint/types" +// // abci "github.com/cometbft/cometbft/abci/types" +// tmproto "github.com/cometbft/cometbft/proto/tendermint/types" // testkeeper "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/keeper" // "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/types" // ) diff --git a/x/validator/keeper/process_validator_set_change.go b/x/validator/keeper/process_validator_set_change.go index f62f091ac..ca8e8ab03 100644 --- a/x/validator/keeper/process_validator_set_change.go +++ b/x/validator/keeper/process_validator_set_change.go @@ -1,8 +1,8 @@ package keeper import ( + abci "github.com/cometbft/cometbft/abci/types" sdk "github.com/cosmos/cosmos-sdk/types" - abci "github.com/tendermint/tendermint/abci/types" "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/types" ) diff --git 
a/x/validator/keeper/rejected_node.go b/x/validator/keeper/rejected_node.go index 825e8b938..5ee8bbc6f 100644 --- a/x/validator/keeper/rejected_node.go +++ b/x/validator/keeper/rejected_node.go @@ -10,8 +10,12 @@ import ( func (k Keeper) SetRejectedNode(ctx sdk.Context, rejectedDisableValidator types.RejectedDisableValidator) { store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.RejectedNodeKeyPrefix)) b := k.cdc.MustMarshal(&rejectedDisableValidator) + address, err := sdk.ValAddressFromBech32(rejectedDisableValidator.GetAddress()) + if err != nil { + return + } store.Set(types.RejectedNodeKey( - rejectedDisableValidator.GetAddress(), + address, ), b) } diff --git a/x/validator/keeper/validator.go b/x/validator/keeper/validator.go index d6d7a0eb8..2ed0022fb 100644 --- a/x/validator/keeper/validator.go +++ b/x/validator/keeper/validator.go @@ -64,15 +64,15 @@ func (k Keeper) RemoveValidator( if err != nil { // TODO ??? issue 99: the best way to deal with that panic(err) - } else { - store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.ValidatorByConsAddrKeyPrefix)) - store.Delete(types.ValidatorByConsAddrKey( - valConsAddr, - )) } + store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.ValidatorByConsAddrKeyPrefix)) + store.Delete(types.ValidatorByConsAddrKey( + valConsAddr, + )) + // FIXME issue 99: owner should be a key here - store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.LastValidatorPowerKeyPrefix)) + store = prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.LastValidatorPowerKeyPrefix)) store.Delete(types.LastValidatorPowerKey(owner)) store = prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.ValidatorKeyPrefix)) diff --git a/x/validator/module.go b/x/validator/module.go index db47d86d1..2e3ed5ba7 100644 --- a/x/validator/module.go +++ b/x/validator/module.go @@ -5,6 +5,7 @@ import ( "encoding/json" "fmt" + abci "github.com/cometbft/cometbft/abci/types" 
"github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/codec" cdctypes "github.com/cosmos/cosmos-sdk/codec/types" @@ -13,7 +14,6 @@ import ( "github.com/gorilla/mux" "github.com/grpc-ecosystem/grpc-gateway/runtime" "github.com/spf13/cobra" - abci "github.com/tendermint/tendermint/abci/types" "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/client/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/types" @@ -61,7 +61,7 @@ func (AppModuleBasic) DefaultGenesis(cdc codec.JSONCodec) json.RawMessage { } // ValidateGenesis performs genesis state validation for the validator module. -func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, config client.TxEncodingConfig, bz json.RawMessage) error { +func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, _ client.TxEncodingConfig, bz json.RawMessage) error { var genState types.GenesisState if err := cdc.UnmarshalJSON(bz, &genState); err != nil { return fmt.Errorf("failed to unmarshal %s genesis state: %w", types.ModuleName, err) @@ -71,7 +71,7 @@ func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, config client.TxEncod } // RegisterRESTRoutes registers the validator module's REST service handlers. -func (AppModuleBasic) RegisterRESTRoutes(clientCtx client.Context, rtr *mux.Router) { +func (AppModuleBasic) RegisterRESTRoutes(_ client.Context, _ *mux.Router) { // rest.RegisterHandlers(clientCtx, rtr) // FIXME issue 99: verify } @@ -113,19 +113,6 @@ func (am AppModule) Name() string { return am.AppModuleBasic.Name() } -// Route returns the validator module's message routing key. -func (am AppModule) Route() sdk.Route { - return sdk.NewRoute(types.RouterKey, NewHandler(am.keeper)) -} - -// QuerierRoute returns the validator module's query routing key. 
-func (AppModule) QuerierRoute() string { return types.QuerierRoute } - -// LegacyQuerierHandler returns the validator module's Querier. -func (am AppModule) LegacyQuerierHandler(legacyQuerierCdc *codec.LegacyAmino) sdk.Querier { - return nil -} - // RegisterServices registers a GRPC query service to respond to the // module-specific GRPC queries. func (am AppModule) RegisterServices(cfg module.Configurator) { diff --git a/x/validator/module_simulation.go b/x/validator/module_simulation.go index 958bdfcd6..7f07e89f5 100644 --- a/x/validator/module_simulation.go +++ b/x/validator/module_simulation.go @@ -4,7 +4,6 @@ import ( "math/rand" "github.com/cosmos/cosmos-sdk/baseapp" - simappparams "github.com/cosmos/cosmos-sdk/simapp/params" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/types/module" simtypes "github.com/cosmos/cosmos-sdk/types/simulation" @@ -18,7 +17,6 @@ import ( var ( _ = sample.AccAddress _ = validatorsimulation.FindAccount - _ = simappparams.StakePerAccount _ = simulation.MsgEntryKind _ = baseapp.Paramspace ) @@ -60,15 +58,10 @@ func (AppModule) GenerateGenesisState(simState *module.SimulationState) { } // ProposalContents doesn't return any content functions for governance proposals. -func (AppModule) ProposalContents(_ module.SimulationState) []simtypes.WeightedProposalContent { +func (AppModule) ProposalContents(_ module.SimulationState) []simtypes.WeightedProposalContent { //nolint:staticcheck return nil } -// RandomizedParams creates randomized param changes for the simulator. -func (am AppModule) RandomizedParams(_ *rand.Rand) []simtypes.ParamChange { - return []simtypes.ParamChange{} -} - // RegisterStoreDecoder registers a decoder. 
func (am AppModule) RegisterStoreDecoder(_ sdk.StoreDecoderRegistry) {} diff --git a/x/validator/simulation/approve_disable_validator.go b/x/validator/simulation/approve_disable_validator.go index 12b6fc20d..0664af1ad 100644 --- a/x/validator/simulation/approve_disable_validator.go +++ b/x/validator/simulation/approve_disable_validator.go @@ -10,7 +10,7 @@ import ( "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/types" ) -func SimulateMsgApproveDisableValidator(k keeper.Keeper) simtypes.Operation { +func SimulateMsgApproveDisableValidator(_ keeper.Keeper) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { simAccount, _ := simtypes.RandomAcc(r, accs) diff --git a/x/validator/simulation/disable_validator.go b/x/validator/simulation/disable_validator.go index 8ad904a5a..51ca095b5 100644 --- a/x/validator/simulation/disable_validator.go +++ b/x/validator/simulation/disable_validator.go @@ -11,7 +11,7 @@ import ( ) func SimulateMsgDisableValidator( - k keeper.Keeper, + _ keeper.Keeper, ) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { diff --git a/x/validator/simulation/enable_validator.go b/x/validator/simulation/enable_validator.go index 863bcb1d7..e5c3369a3 100644 --- a/x/validator/simulation/enable_validator.go +++ b/x/validator/simulation/enable_validator.go @@ -11,7 +11,7 @@ import ( ) func SimulateMsgEnableValidator( - k keeper.Keeper, + _ keeper.Keeper, ) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { diff --git a/x/validator/simulation/propose_disable_validator.go b/x/validator/simulation/propose_disable_validator.go index 
f323a9d14..042748c55 100644 --- a/x/validator/simulation/propose_disable_validator.go +++ b/x/validator/simulation/propose_disable_validator.go @@ -10,7 +10,7 @@ import ( "github.com/zigbee-alliance/distributed-compliance-ledger/x/validator/types" ) -func SimulateMsgProposeDisableValidator(k keeper.Keeper) simtypes.Operation { +func SimulateMsgProposeDisableValidator(_ keeper.Keeper) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { simAccount, _ := simtypes.RandomAcc(r, accs) diff --git a/x/validator/simulation/reject_disable_node.go b/x/validator/simulation/reject_disable_node.go index 891dd9a96..1df8a6393 100644 --- a/x/validator/simulation/reject_disable_node.go +++ b/x/validator/simulation/reject_disable_node.go @@ -11,7 +11,7 @@ import ( ) func SimulateMsgRejectDisableValidator( - k keeper.Keeper, + _ keeper.Keeper, ) simtypes.Operation { return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { diff --git a/x/validator/types/codec.go b/x/validator/types/codec.go index fd20786b1..ddca41aea 100644 --- a/x/validator/types/codec.go +++ b/x/validator/types/codec.go @@ -38,7 +38,7 @@ func RegisterInterfaces(registry cdctypes.InterfaceRegistry) { ) // this line is used by starport scaffolding # 3 - msgservice.RegisterMsgServiceDesc(registry, &_Msg_serviceDesc) + msgservice.RegisterMsgServiceDesc(registry, &_Msg_serviceDesc) //nolint:nosnakecase } var ModuleCdc = codec.NewProtoCodec(cdctypes.NewInterfaceRegistry()) diff --git a/x/validator/types/description.pb.go b/x/validator/types/description.pb.go index e45ee7439..8d3d76e57 100644 --- a/x/validator/types/description.pb.go +++ b/x/validator/types/description.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: validator/description.proto +// source: zigbeealliance/distributedcomplianceledger/validator/description.proto package types import ( fmt "fmt" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -37,7 +37,7 @@ type Description struct { func (m *Description) Reset() { *m = Description{} } func (*Description) ProtoMessage() {} func (*Description) Descriptor() ([]byte, []int) { - return fileDescriptor_39ba45542c55bc8c, []int{0} + return fileDescriptor_2ad296d0157123cf, []int{0} } func (m *Description) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -98,27 +98,29 @@ func init() { proto.RegisterType((*Description)(nil), "zigbeealliance.distributedcomplianceledger.validator.Description") } -func init() { proto.RegisterFile("validator/description.proto", fileDescriptor_39ba45542c55bc8c) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/validator/description.proto", fileDescriptor_2ad296d0157123cf) +} -var fileDescriptor_39ba45542c55bc8c = []byte{ - // 261 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x4c, 0x90, 0xb1, 0x4e, 0xc3, 0x30, - 0x14, 0x45, 0x63, 0x40, 0x50, 0xc2, 0x16, 0x31, 0x44, 0x45, 0x72, 0x11, 0x13, 0x4b, 0xe2, 0x01, - 0x26, 0x46, 0xc4, 0xc4, 0xc8, 0xc8, 0x16, 0xc7, 0x4f, 0xe6, 0x09, 0x27, 0x8e, 0xec, 0x57, 0xa0, - 0x6c, 0xfc, 0x01, 0x23, 0x63, 0x3f, 0x87, 0xb1, 0x23, 0x23, 0x4a, 0x16, 0x3e, 0x03, 0x25, 0x6e, - 0x9b, 0x6e, 0x3e, 0xbe, 0xba, 0xd7, 0x3a, 0x8e, 0xcf, 0x5e, 0x0a, 0x83, 0xaa, 0x20, 0xeb, 0x84, - 0x02, 0x5f, 0x3a, 0x6c, 0x08, 0x6d, 0x9d, 0x37, 0xce, 0x92, 0x4d, 0xae, 0xdf, 0x51, 0x4b, 0x80, - 0xc2, 0x18, 0x2c, 0xea, 0x12, 0x72, 0x85, 0x9e, 0x1c, 0xca, 0x39, 0x81, 0x2a, 0x6d, 0xd5, 0x84, - 0x5b, 0x03, 0x4a, 0x83, 0xcb, 0xb7, 0x3b, 0xd3, 0x53, 0x6d, 0xb5, 0x1d, 0x06, 0x44, 0x7f, 
0x0a, - 0x5b, 0x17, 0x1f, 0x2c, 0x3e, 0xb9, 0x1b, 0x5f, 0x48, 0xd2, 0xf8, 0xa8, 0xb2, 0x35, 0x3e, 0x83, - 0x4b, 0xd9, 0x39, 0xbb, 0x3c, 0x7e, 0xd8, 0x60, 0x32, 0x8d, 0x27, 0xa8, 0xa0, 0x26, 0xa4, 0x45, - 0xba, 0x37, 0x44, 0x5b, 0xee, 0x5b, 0xaf, 0x20, 0x3d, 0x12, 0xa4, 0xfb, 0xa1, 0xb5, 0xc6, 0x3e, - 0x51, 0x40, 0x05, 0x1a, 0x9f, 0x1e, 0x84, 0x64, 0x8d, 0x37, 0x93, 0xaf, 0xe5, 0x2c, 0xfa, 0x5b, - 0xce, 0xd8, 0xad, 0xfa, 0x6e, 0x39, 0x5b, 0xb5, 0x9c, 0xfd, 0xb6, 0x9c, 0x7d, 0x76, 0x3c, 0x5a, - 0x75, 0x3c, 0xfa, 0xe9, 0x78, 0xf4, 0x78, 0xaf, 0x91, 0x9e, 0xe6, 0x32, 0x2f, 0x6d, 0x25, 0x82, - 0x74, 0xb6, 0xb1, 0x16, 0x3b, 0xd6, 0xd9, 0xa8, 0x9d, 0x05, 0x6f, 0xf1, 0x26, 0xc6, 0x1f, 0xa4, - 0x45, 0x03, 0x5e, 0x1e, 0x0e, 0xc2, 0x57, 0xff, 0x01, 0x00, 0x00, 0xff, 0xff, 0x3a, 0x9e, 0xc4, - 0xa1, 0x5b, 0x01, 0x00, 0x00, +var fileDescriptor_2ad296d0157123cf = []byte{ + // 268 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x72, 0xab, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0x97, 0x25, 0xe6, 0x64, 0xa6, 0x24, 0x96, 0xe4, 0x17, 0xe9, 0xa7, 0xa4, 0x16, + 0x27, 0x17, 0x65, 0x16, 0x94, 0x64, 0xe6, 0xe7, 0xe9, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0x99, + 0xa0, 0x9a, 0xa3, 0x87, 0xc7, 0x1c, 0x3d, 0xb8, 0x39, 0x52, 0x22, 0xe9, 0xf9, 0xe9, 0xf9, 0x60, + 0x03, 0xf4, 0x41, 0x2c, 0x88, 0x59, 0x4a, 0x8d, 0x8c, 0x5c, 0xdc, 0x2e, 0x08, 0x1b, 0x84, 0x24, + 0xb8, 0xd8, 0x73, 0xf3, 0xf3, 0x32, 0xb3, 0x53, 0x8b, 0x24, 0x18, 0x15, 0x18, 0x35, 0x38, 0x83, + 0x60, 0x5c, 0x21, 0x29, 0x2e, 0x8e, 0xcc, 0x94, 0xd4, 0xbc, 0x92, 0xcc, 0x92, 0x4a, 0x09, 0x26, + 0xb0, 0x14, 0x9c, 0x0f, 0xd2, 0x55, 0x9e, 0x9a, 0x54, 0x9c, 0x59, 0x92, 0x2a, 0xc1, 0x0c, 0xd1, + 0x05, 0xe5, 0x82, 0x64, 0x52, 0x52, 0x4b, 0x12, 0x33, 0x73, 0x8a, 0x25, 0x58, 0x20, 0x32, 0x50, + 
0xae, 0x15, 0xc7, 0x8c, 0x05, 0xf2, 0x0c, 0x2f, 0x16, 0xc8, 0x33, 0x3a, 0xa5, 0x9c, 0x78, 0x24, + 0xc7, 0x78, 0xe1, 0x91, 0x1c, 0xe3, 0x83, 0x47, 0x72, 0x8c, 0x13, 0x1e, 0xcb, 0x31, 0x5c, 0x78, + 0x2c, 0xc7, 0x70, 0xe3, 0xb1, 0x1c, 0x43, 0x94, 0x57, 0x7a, 0x66, 0x49, 0x46, 0x69, 0x92, 0x5e, + 0x72, 0x7e, 0xae, 0x3e, 0xc4, 0xd3, 0xba, 0xd8, 0x42, 0x4f, 0x17, 0xe1, 0x6d, 0x5d, 0x68, 0xf8, + 0x55, 0x20, 0x85, 0x60, 0x49, 0x65, 0x41, 0x6a, 0x71, 0x12, 0x1b, 0xd8, 0xc3, 0xc6, 0x80, 0x00, + 0x00, 0x00, 0xff, 0xff, 0xd0, 0x24, 0x4c, 0xf4, 0x86, 0x01, 0x00, 0x00, } func (this *Description) Equal(that interface{}) bool { diff --git a/x/validator/types/disabled_validator.pb.go b/x/validator/types/disabled_validator.pb.go index fb5c5da55..dc9f12965 100644 --- a/x/validator/types/disabled_validator.pb.go +++ b/x/validator/types/disabled_validator.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: validator/disabled_validator.proto +// source: zigbeealliance/distributedcomplianceledger/validator/disabled_validator.proto package types import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -35,7 +35,7 @@ func (m *DisabledValidator) Reset() { *m = DisabledValidator{} } func (m *DisabledValidator) String() string { return proto.CompactTextString(m) } func (*DisabledValidator) ProtoMessage() {} func (*DisabledValidator) Descriptor() ([]byte, []int) { - return fileDescriptor_7d5907d654e5cf90, []int{0} + return fileDescriptor_ad0ec23eb7309c10, []int{0} } func (m *DisabledValidator) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -104,32 +104,32 @@ func init() { } func init() { - proto.RegisterFile("validator/disabled_validator.proto", fileDescriptor_7d5907d654e5cf90) + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/validator/disabled_validator.proto", fileDescriptor_ad0ec23eb7309c10) } -var 
fileDescriptor_7d5907d654e5cf90 = []byte{ - // 329 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x91, 0xb1, 0x4e, 0xc3, 0x30, - 0x10, 0x86, 0x9b, 0x16, 0x28, 0x0d, 0x13, 0x01, 0xa4, 0xd0, 0xc1, 0xaa, 0x3a, 0x75, 0x89, 0x8d, - 0x0a, 0x1b, 0x53, 0x2b, 0x24, 0x24, 0x06, 0x86, 0x22, 0x90, 0x60, 0xa9, 0x1c, 0xfb, 0x14, 0x8c, - 0x9c, 0x38, 0xb2, 0xdd, 0x8a, 0xf2, 0x14, 0x3c, 0x0c, 0x0f, 0xc1, 0x58, 0x31, 0xb1, 0x81, 0xda, - 0x17, 0x41, 0xad, 0x9b, 0x86, 0x01, 0x09, 0x09, 0x46, 0xdf, 0xfd, 0xff, 0xff, 0xdd, 0xf9, 0xfc, - 0xf6, 0x98, 0x4a, 0xc1, 0xa9, 0x55, 0x9a, 0x70, 0x61, 0x68, 0x2c, 0x81, 0x0f, 0xd7, 0x25, 0x9c, - 0x6b, 0x65, 0x55, 0x70, 0xf2, 0x24, 0x92, 0x18, 0x80, 0x4a, 0x29, 0x68, 0xc6, 0x00, 0x73, 0x61, - 0xac, 0x16, 0xf1, 0xc8, 0x02, 0x67, 0x2a, 0xcd, 0x5d, 0x55, 0x02, 0x4f, 0x40, 0xe3, 0xb5, 0xb7, - 0x79, 0xc8, 0x94, 0x49, 0x95, 0x19, 0x2e, 0x33, 0x88, 0x7b, 0xb8, 0xc0, 0xe6, 0x41, 0x09, 0x4d, - 0x34, 0xcd, 0xac, 0x2b, 0xb7, 0x3f, 0xaa, 0xfe, 0xee, 0xd9, 0x6a, 0x88, 0x9b, 0x42, 0x11, 0x74, - 0xfd, 0x3a, 0xe5, 0x5c, 0x83, 0x31, 0xa1, 0xd7, 0xf2, 0x3a, 0x8d, 0x7e, 0xf8, 0xf6, 0x12, 0xed, - 0xaf, 0xf2, 0x7a, 0xae, 0x73, 0x65, 0xb5, 0xc8, 0x92, 0x41, 0x21, 0x5c, 0x78, 0x98, 0x86, 0x85, - 0x3d, 0xac, 0xfe, 0xe6, 0x59, 0x09, 0x83, 0x5b, 0xbf, 0x41, 0xf3, 0x5c, 0xab, 0x31, 0x95, 0x26, - 0xac, 0xb5, 0x6a, 0x9d, 0x9d, 0xee, 0x29, 0xfe, 0xcb, 0xe6, 0xf8, 0x7c, 0xb1, 0xd3, 0xa0, 0x4c, - 0x0b, 0x8e, 0xfc, 0xbd, 0xe2, 0x73, 0xfb, 0x93, 0x4b, 0xc5, 0xa1, 0xc7, 0x53, 0x91, 0x85, 0x1b, - 0x2d, 0xaf, 0xb3, 0x3d, 0xf8, 0xa9, 0x15, 0x5c, 0xfb, 0x75, 0x0d, 0x0f, 0xc0, 0xac, 0x09, 0x37, - 0xff, 0x3f, 0x4a, 0x91, 0xd5, 0xe7, 0xaf, 0x33, 0xe4, 0x4d, 0x67, 0xc8, 0xfb, 0x9c, 0x21, 0xef, - 0x79, 0x8e, 0x2a, 0xd3, 0x39, 0xaa, 0xbc, 0xcf, 0x51, 0xe5, 0xee, 0x22, 0x11, 0xf6, 0x7e, 0x14, - 0x63, 0xa6, 0x52, 0xe2, 0x48, 0x51, 0x81, 0x22, 0xdf, 0x50, 0x51, 0xc9, 0x8a, 0x1c, 0x8c, 0x3c, - 0x92, 0xf2, 0x9a, 0x76, 0x92, 0x83, 0x89, 
0xb7, 0x96, 0xe7, 0x3c, 0xfe, 0x0a, 0x00, 0x00, 0xff, - 0xff, 0xa5, 0x68, 0xf8, 0x13, 0x5c, 0x02, 0x00, 0x00, +var fileDescriptor_ad0ec23eb7309c10 = []byte{ + // 333 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x91, 0x31, 0x4b, 0xc3, 0x40, + 0x18, 0x86, 0x9b, 0x56, 0xad, 0x8d, 0x93, 0xd1, 0x21, 0x76, 0x08, 0xc5, 0xa9, 0x4b, 0x2e, 0x52, + 0xdd, 0x5c, 0x6c, 0x11, 0x04, 0x41, 0x87, 0x8a, 0x82, 0x2e, 0xe5, 0x72, 0xf7, 0x11, 0x4f, 0x2e, + 0xb9, 0x70, 0x77, 0x2d, 0xd6, 0x5f, 0xe1, 0x8f, 0xf1, 0x47, 0x38, 0x16, 0x27, 0x37, 0xa5, 0xf9, + 0x23, 0x92, 0x5e, 0xd2, 0x2a, 0x14, 0x85, 0x3a, 0xe6, 0xfb, 0xde, 0xf7, 0xf9, 0xde, 0xdc, 0x6b, + 0x5f, 0x3c, 0xb1, 0x28, 0x04, 0xc0, 0x9c, 0x33, 0x9c, 0x10, 0x08, 0x28, 0x53, 0x5a, 0xb2, 0x70, + 0xa8, 0x81, 0x12, 0x11, 0xa7, 0x66, 0xca, 0x81, 0x46, 0x20, 0x83, 0x11, 0xe6, 0x8c, 0x62, 0x2d, + 0x64, 0xae, 0xc2, 0x21, 0x07, 0x3a, 0x98, 0x8f, 0x50, 0x2a, 0x85, 0x16, 0xce, 0xd1, 0x4f, 0x1c, + 0xfa, 0x05, 0x87, 0xe6, 0xde, 0xe6, 0x1e, 0x11, 0x2a, 0x16, 0x6a, 0x30, 0x63, 0x04, 0xe6, 0xc3, + 0x00, 0x9b, 0x27, 0x2b, 0xe5, 0x8b, 0x24, 0x4e, 0xb4, 0x21, 0xec, 0x7f, 0x54, 0xed, 0xed, 0xd3, + 0x22, 0xef, 0x4d, 0xa9, 0x70, 0x3a, 0x76, 0x1d, 0x53, 0x2a, 0x41, 0x29, 0xd7, 0x6a, 0x59, 0xed, + 0x46, 0xcf, 0x7d, 0x7b, 0xf1, 0x77, 0x8b, 0xd3, 0x5d, 0xb3, 0xb9, 0xd2, 0x92, 0x25, 0x51, 0xbf, + 0x14, 0xe6, 0x1e, 0x22, 0x21, 0xb7, 0xbb, 0xd5, 0xbf, 0x3c, 0x85, 0xd0, 0xb9, 0xb5, 0x1b, 0x38, + 0x4d, 0xa5, 0x18, 0x61, 0xae, 0xdc, 0x5a, 0xab, 0xd6, 0xde, 0xea, 0x1c, 0xa3, 0x55, 0x1e, 0x09, + 0x9d, 0xe5, 0xff, 0xd4, 0x5f, 0xd0, 0x9c, 0x03, 0x7b, 0xa7, 0xec, 0xa1, 0x37, 0xbe, 0x14, 0x14, + 0xba, 0x34, 0x66, 0x89, 0xbb, 0xd6, 0xb2, 0xda, 0x9b, 0xfd, 0x65, 0x2b, 0xe7, 0xda, 0xae, 0x4b, + 0x78, 0x00, 0xa2, 0x95, 0xbb, 0xfe, 0xff, 0x28, 0x25, 0xab, 0x47, 0x5f, 0xa7, 0x9e, 0x35, 0x99, + 0x7a, 0xd6, 0xe7, 0xd4, 0xb3, 0x9e, 0x33, 0xaf, 0x32, 0xc9, 0xbc, 0xca, 0x7b, 0xe6, 0x55, 0xee, + 0xce, 0x23, 0xa6, 0xef, 
0x87, 0x21, 0x22, 0x22, 0x0e, 0xcc, 0x25, 0x7f, 0x59, 0x93, 0xfe, 0xe2, + 0x96, 0x5f, 0x74, 0xf9, 0xf8, 0xad, 0x4d, 0x3d, 0x4e, 0x41, 0x85, 0x1b, 0xb3, 0x3a, 0x0f, 0xbf, + 0x02, 0x00, 0x00, 0xff, 0xff, 0x37, 0x1f, 0xa8, 0x9f, 0xb2, 0x02, 0x00, 0x00, } func (m *DisabledValidator) Marshal() (dAtA []byte, err error) { diff --git a/x/validator/types/errors.go b/x/validator/types/errors.go index 9b534dfb9..d6a7266df 100644 --- a/x/validator/types/errors.go +++ b/x/validator/types/errors.go @@ -17,24 +17,24 @@ package types import ( "fmt" - sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "cosmossdk.io/errors" ) var ( - PoolIsFull = sdkerrors.Register(ModuleName, 601, "maximum number of active nodes reached") - ErrProposedDisableValidatorAlreadyExists = sdkerrors.Register(ModuleName, 602, "disable validator proposal already exists") - ErrProposedDisableValidatorDoesNotExist = sdkerrors.Register(ModuleName, 603, "disable validator proposal does not exist") - ErrDisabledValidatorAlreadytExists = sdkerrors.Register(ModuleName, 604, "disabled validator already exist") - ErrDisabledValidatorDoesNotExist = sdkerrors.Register(ModuleName, 605, "disabled validator does not exist") + PoolIsFull = errors.Register(ModuleName, 601, "maximum number of active nodes reached") + ErrProposedDisableValidatorAlreadyExists = errors.Register(ModuleName, 602, "disable validator proposal already exists") + ErrProposedDisableValidatorDoesNotExist = errors.Register(ModuleName, 603, "disable validator proposal does not exist") + ErrDisabledValidatorAlreadytExists = errors.Register(ModuleName, 604, "disabled validator already exist") + ErrDisabledValidatorDoesNotExist = errors.Register(ModuleName, 605, "disabled validator does not exist") ) func ErrPoolIsFull() error { - return sdkerrors.Wrapf(PoolIsFull, + return errors.Wrapf(PoolIsFull, fmt.Sprintf("Pool ledger already contains maximum number of active nodes: \"%v\"", MaxNodes)) } func NewErrProposedDisableValidatorAlreadyExists(name 
interface{}) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrProposedDisableValidatorAlreadyExists, "Disable proposal with validator address=%v already exists on the ledger", name, @@ -42,7 +42,7 @@ func NewErrProposedDisableValidatorAlreadyExists(name interface{}) error { } func NewErrProposedDisableValidatorDoesNotExist(name interface{}) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrProposedDisableValidatorDoesNotExist, "Disable proposal with validator address=%v does not exist on the ledger", name, @@ -50,7 +50,7 @@ func NewErrProposedDisableValidatorDoesNotExist(name interface{}) error { } func NewErrDisabledValidatorAlreadyExists(name interface{}) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrDisabledValidatorAlreadytExists, "Disabled validator with address=%v already exists on the ledger", name, @@ -58,7 +58,7 @@ func NewErrDisabledValidatorAlreadyExists(name interface{}) error { } func NewErrDisabledValidatorDoesNotExist(name interface{}) error { - return sdkerrors.Wrapf( + return errors.Wrapf( ErrDisabledValidatorDoesNotExist, "Disabled validator with address=%v does not exist on the ledger", name, diff --git a/x/validator/types/expected_keepers.go b/x/validator/types/expected_keepers.go index 62950d893..963dde3f0 100644 --- a/x/validator/types/expected_keepers.go +++ b/x/validator/types/expected_keepers.go @@ -13,6 +13,6 @@ type DclauthKeeper interface { SetRevokedAccount(ctx sdk.Context, revokedAccount dclauthtypes.RevokedAccount) RemoveAccount(ctx sdk.Context, address sdk.AccAddress) AddAccountToRevokedAccount( - ctx sdk.Context, accAddr sdk.AccAddress, approvals []*dclauthtypes.Grant, reason dclauthtypes.RevokedAccount_Reason, + ctx sdk.Context, accAddr sdk.AccAddress, approvals []*dclauthtypes.Grant, reason dclauthtypes.RevokedAccount_Reason, //nolint:nosnakecase ) (*dclauthtypes.RevokedAccount, error) } diff --git a/x/validator/types/genesis.pb.go b/x/validator/types/genesis.pb.go index f423d08df..e8a45a1f5 
100644 --- a/x/validator/types/genesis.pb.go +++ b/x/validator/types/genesis.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: validator/genesis.proto +// source: zigbeealliance/distributedcomplianceledger/validator/genesis.proto package types import ( fmt "fmt" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -36,7 +36,7 @@ func (m *GenesisState) Reset() { *m = GenesisState{} } func (m *GenesisState) String() string { return proto.CompactTextString(m) } func (*GenesisState) ProtoMessage() {} func (*GenesisState) Descriptor() ([]byte, []int) { - return fileDescriptor_8143c6ee7ddaa59a, []int{0} + return fileDescriptor_9c20c3934fbc6f83, []int{0} } func (m *GenesisState) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -104,35 +104,37 @@ func init() { proto.RegisterType((*GenesisState)(nil), "zigbeealliance.distributedcomplianceledger.validator.GenesisState") } -func init() { proto.RegisterFile("validator/genesis.proto", fileDescriptor_8143c6ee7ddaa59a) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/validator/genesis.proto", fileDescriptor_9c20c3934fbc6f83) +} -var fileDescriptor_8143c6ee7ddaa59a = []byte{ +var fileDescriptor_9c20c3934fbc6f83 = []byte{ // 393 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x2f, 0x4b, 0xcc, 0xc9, - 0x4c, 0x49, 0x2c, 0xc9, 0x2f, 0xd2, 0x4f, 0x4f, 0xcd, 0x4b, 0x2d, 0xce, 0x2c, 0xd6, 0x2b, 0x28, - 0xca, 0x2f, 0xc9, 0x17, 0x32, 0xa9, 0xca, 0x4c, 0x4f, 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, - 0xcc, 0x4b, 0x4e, 0xd5, 0x4b, 0xc9, 0x2c, 0x2e, 0x29, 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, - 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, 0xa7, 0x16, 0xe9, 0xc1, 0xcd, 0x90, 0x92, - 0x44, 0x18, 0x07, 0x67, 0x41, 0x0c, 0x94, 0x52, 
0x41, 0x48, 0xe5, 0x24, 0x16, 0x97, 0xc4, 0xc3, - 0xb9, 0xf1, 0x05, 0xf9, 0xe5, 0xa9, 0x30, 0x55, 0x5a, 0x08, 0x55, 0x05, 0x45, 0xf9, 0x05, 0xf9, - 0xc5, 0xa9, 0x29, 0xf1, 0x29, 0x99, 0xc5, 0x89, 0x49, 0x39, 0xa9, 0xf1, 0xe8, 0x26, 0x2a, 0x21, - 0xd4, 0x42, 0x95, 0xa4, 0xe0, 0x53, 0x53, 0x94, 0x9a, 0x95, 0x9a, 0x5c, 0x82, 0x45, 0x8d, 0x48, - 0x7a, 0x7e, 0x7a, 0x3e, 0x98, 0xa9, 0x0f, 0x62, 0x41, 0x44, 0x95, 0x36, 0xb1, 0x72, 0xf1, 0xb8, - 0x43, 0x82, 0x24, 0xb8, 0x24, 0xb1, 0x24, 0x55, 0x28, 0x9b, 0x8b, 0x17, 0xae, 0xd3, 0x27, 0xb3, - 0xb8, 0x44, 0x82, 0x51, 0x81, 0x59, 0x83, 0xdb, 0xc8, 0x5e, 0x8f, 0x9c, 0x90, 0xd2, 0x0b, 0x83, - 0xb1, 0x9c, 0x58, 0x4e, 0xdc, 0x93, 0x67, 0x08, 0x42, 0x35, 0x5b, 0xa8, 0x8d, 0x91, 0x4b, 0x0c, - 0x14, 0x4c, 0x70, 0x65, 0x01, 0xa0, 0x40, 0x02, 0x5b, 0xcb, 0x04, 0xb6, 0xd6, 0x83, 0x3c, 0x6b, - 0x7d, 0x30, 0xcc, 0x84, 0xda, 0x8f, 0xc3, 0x36, 0xa1, 0x19, 0x8c, 0x5c, 0x32, 0xb0, 0x98, 0x70, - 0x81, 0x84, 0x72, 0x18, 0x4a, 0x28, 0x30, 0x83, 0x9d, 0xe3, 0x47, 0x9e, 0x73, 0x02, 0x70, 0x98, - 0x0c, 0x75, 0x14, 0x5e, 0x9b, 0x85, 0x9a, 0x19, 0xb9, 0x44, 0x61, 0x11, 0x8f, 0xea, 0x26, 0x16, - 0xb0, 0x9b, 0xdc, 0xc9, 0x73, 0x93, 0x0b, 0xba, 0x91, 0x50, 0xc7, 0x60, 0xb7, 0x4b, 0xa8, 0x8b, - 0x91, 0x4b, 0x14, 0x96, 0xb4, 0x50, 0x5d, 0xc1, 0x4a, 0x49, 0xc8, 0x04, 0x41, 0x8d, 0xc4, 0x11, - 0x32, 0xd8, 0xad, 0x74, 0x4a, 0x39, 0xf1, 0x48, 0x8e, 0xf1, 0xc2, 0x23, 0x39, 0xc6, 0x07, 0x8f, - 0xe4, 0x18, 0x27, 0x3c, 0x96, 0x63, 0xb8, 0xf0, 0x58, 0x8e, 0xe1, 0xc6, 0x63, 0x39, 0x86, 0x28, - 0xaf, 0xf4, 0xcc, 0x92, 0x8c, 0xd2, 0x24, 0xbd, 0xe4, 0xfc, 0x5c, 0x7d, 0x88, 0x83, 0x74, 0x61, - 0x2e, 0xd2, 0x47, 0x72, 0x91, 0x2e, 0xc2, 0x49, 0xba, 0x10, 0x37, 0xe9, 0x57, 0x20, 0xb2, 0xb2, - 0x7e, 0x49, 0x65, 0x41, 0x6a, 0x71, 0x12, 0x1b, 0x38, 0x87, 0x18, 0x03, 0x02, 0x00, 0x00, 0xff, - 0xff, 0x36, 0x1c, 0xd4, 0x18, 0x3d, 0x04, 0x00, 0x00, + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x93, 0x3d, 0x4b, 0xc3, 0x40, + 0x18, 0xc7, 0x73, 0xb6, 0x75, 
0x88, 0xba, 0x04, 0x2b, 0x52, 0x24, 0x8a, 0x93, 0x4b, 0x13, 0x50, + 0x77, 0xa1, 0x14, 0x2a, 0x52, 0xb5, 0x54, 0xec, 0xe0, 0x12, 0x2e, 0xb9, 0x87, 0x78, 0x9a, 0xf6, + 0x42, 0xee, 0xea, 0xdb, 0x2a, 0x38, 0x38, 0x39, 0xfa, 0x39, 0xfc, 0x14, 0x1d, 0x3b, 0x3a, 0x89, + 0xb4, 0x5f, 0x44, 0x9a, 0x37, 0x4d, 0x4d, 0x3b, 0x9c, 0x6e, 0x4f, 0x8f, 0xe6, 0xf7, 0xff, 0xf1, + 0xbc, 0xa8, 0xb5, 0x07, 0xea, 0xda, 0x00, 0xd8, 0xf3, 0x28, 0xee, 0x39, 0x60, 0x12, 0xca, 0x45, + 0x40, 0xed, 0xbe, 0x00, 0xe2, 0xb0, 0xae, 0x1f, 0xbd, 0x7a, 0x40, 0x5c, 0x08, 0xcc, 0x1b, 0xec, + 0x51, 0x82, 0x05, 0x0b, 0x4c, 0x17, 0x7a, 0xc0, 0x29, 0x37, 0xfc, 0x80, 0x09, 0xa6, 0xed, 0x67, + 0x19, 0xc6, 0x1c, 0x86, 0x91, 0x32, 0x2a, 0x75, 0xa9, 0xe4, 0xb4, 0x8a, 0xb2, 0x2b, 0xa7, 0x52, + 0x14, 0x0f, 0x73, 0x61, 0xa5, 0x3f, 0x2d, 0x9f, 0xdd, 0x42, 0x02, 0x3c, 0x97, 0x02, 0xfa, 0x01, + 0xf3, 0x19, 0x07, 0x62, 0x11, 0xca, 0xb1, 0xed, 0x81, 0x35, 0xed, 0x79, 0x2c, 0x85, 0x8d, 0x69, + 0xe4, 0x9f, 0x70, 0x01, 0x5c, 0x81, 0x23, 0x72, 0x70, 0xab, 0x2e, 0x73, 0x59, 0x58, 0x9a, 0x93, + 0x2a, 0x7a, 0xdd, 0x7e, 0x2b, 0xa9, 0xcb, 0x8d, 0x68, 0xd2, 0x67, 0x02, 0x0b, 0xd0, 0xae, 0xd5, + 0x95, 0xf4, 0xcb, 0x26, 0xe5, 0x62, 0x1d, 0x6d, 0x15, 0x76, 0x96, 0x76, 0x0f, 0x0c, 0x99, 0x05, + 0x30, 0x3a, 0x49, 0x55, 0x2b, 0x0e, 0x3e, 0x36, 0x95, 0x76, 0x96, 0xad, 0x3d, 0x21, 0x75, 0x6d, + 0x32, 0xa7, 0xf4, 0x6f, 0xad, 0xc9, 0x94, 0xc2, 0xd8, 0x85, 0x30, 0xf6, 0x50, 0x2e, 0xb6, 0xf9, + 0x8b, 0x19, 0xe7, 0xcf, 0x48, 0xd3, 0x5e, 0x91, 0xba, 0x91, 0xcc, 0xb7, 0x1e, 0x0d, 0xa4, 0x93, + 0xe9, 0x42, 0x21, 0xd4, 0x39, 0x91, 0xd3, 0x69, 0xcd, 0x20, 0xc7, 0x52, 0x73, 0x93, 0xb5, 0x47, + 0xa4, 0x96, 0x93, 0x1d, 0xc9, 0x3a, 0x15, 0x43, 0xa7, 0x86, 0x9c, 0x53, 0x7d, 0x1a, 0x19, 0xcb, + 0xe4, 0x67, 0x69, 0xcf, 0x48, 0x2d, 0x27, 0xab, 0x95, 0xb5, 0x28, 0xfd, 0xa5, 0x33, 0xed, 0x18, + 0x39, 0xa3, 0x33, 0xf9, 0x91, 0x35, 0x32, 0x18, 0xe9, 0x68, 0x38, 0xd2, 0xd1, 0xe7, 0x48, 0x47, + 0x2f, 0x63, 0x5d, 0x19, 0x8e, 0x75, 0xe5, 0x7d, 0xac, 0x2b, 0x17, 
0x47, 0x2e, 0x15, 0x97, 0x7d, + 0xdb, 0x70, 0x58, 0xd7, 0x8c, 0x84, 0xaa, 0x79, 0xf7, 0x53, 0xfd, 0x56, 0xaa, 0xc6, 0x17, 0x74, + 0xf7, 0xe3, 0x86, 0xc4, 0xbd, 0x0f, 0xdc, 0x5e, 0x0c, 0x2f, 0x64, 0xef, 0x2b, 0x00, 0x00, 0xff, + 0xff, 0x1d, 0xb3, 0x3d, 0x53, 0x3f, 0x05, 0x00, 0x00, } func (m *GenesisState) Marshal() (dAtA []byte, err error) { diff --git a/x/validator/types/grant.pb.go b/x/validator/types/grant.pb.go index 7ebe37b3d..f944d8677 100644 --- a/x/validator/types/grant.pb.go +++ b/x/validator/types/grant.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: validator/grant.proto +// source: zigbeealliance/distributedcomplianceledger/validator/grant.proto package types import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -33,7 +33,7 @@ func (m *Grant) Reset() { *m = Grant{} } func (m *Grant) String() string { return proto.CompactTextString(m) } func (*Grant) ProtoMessage() {} func (*Grant) Descriptor() ([]byte, []int) { - return fileDescriptor_c52d4dfaf8316eeb, []int{0} + return fileDescriptor_8991a374e1961121, []int{0} } func (m *Grant) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -87,26 +87,28 @@ func init() { proto.RegisterType((*Grant)(nil), "zigbeealliance.distributedcomplianceledger.validator.Grant") } -func init() { proto.RegisterFile("validator/grant.proto", fileDescriptor_c52d4dfaf8316eeb) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/validator/grant.proto", fileDescriptor_8991a374e1961121) +} -var fileDescriptor_c52d4dfaf8316eeb = []byte{ - // 251 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x2d, 0x4b, 0xcc, 0xc9, - 0x4c, 0x49, 0x2c, 0xc9, 0x2f, 0xd2, 0x4f, 0x2f, 0x4a, 0xcc, 0x2b, 0xd1, 0x2b, 0x28, 0xca, 0x2f, - 0xc9, 0x17, 0x32, 0xa9, 0xca, 0x4c, 0x4f, 0x4a, 0x4d, 0x4d, 
0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, - 0x4e, 0xd5, 0x4b, 0xc9, 0x2c, 0x2e, 0x29, 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, - 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, 0xa7, 0x16, 0xe9, 0xc1, 0x4d, 0x90, 0x92, 0x4c, 0xce, - 0x2f, 0xce, 0xcd, 0x2f, 0x8e, 0x07, 0x9b, 0xa1, 0x0f, 0xe1, 0x40, 0x0c, 0x54, 0x4a, 0xe6, 0x62, - 0x75, 0x07, 0x99, 0x2f, 0x64, 0xc4, 0xc5, 0x9e, 0x98, 0x92, 0x52, 0x94, 0x5a, 0x5c, 0x2c, 0xc1, - 0xa8, 0xc0, 0xa8, 0xc1, 0xe9, 0x24, 0x71, 0x69, 0x8b, 0xae, 0x08, 0x54, 0xad, 0x23, 0x44, 0x26, - 0xb8, 0xa4, 0x28, 0x33, 0x2f, 0x3d, 0x08, 0xa6, 0x50, 0x48, 0x88, 0x8b, 0xa5, 0x24, 0x33, 0x37, - 0x55, 0x82, 0x49, 0x81, 0x51, 0x83, 0x39, 0x08, 0xcc, 0x06, 0x89, 0x65, 0xe6, 0xa5, 0xe5, 0x4b, - 0x30, 0x83, 0x0c, 0x09, 0x02, 0xb3, 0x9d, 0x52, 0x4e, 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, 0x8e, - 0xf1, 0xc1, 0x23, 0x39, 0xc6, 0x09, 0x8f, 0xe5, 0x18, 0x2e, 0x3c, 0x96, 0x63, 0xb8, 0xf1, 0x58, - 0x8e, 0x21, 0xca, 0x2b, 0x3d, 0xb3, 0x24, 0xa3, 0x34, 0x49, 0x2f, 0x39, 0x3f, 0x57, 0x1f, 0xe2, - 0x35, 0x5d, 0x98, 0xdf, 0xf4, 0x91, 0xfc, 0xa6, 0x8b, 0xf0, 0x9c, 0x2e, 0xc4, 0x77, 0xfa, 0x15, - 0xfa, 0x88, 0x10, 0x2a, 0xa9, 0x2c, 0x48, 0x2d, 0x4e, 0x62, 0x03, 0xfb, 0xc8, 0x18, 0x10, 0x00, - 0x00, 0xff, 0xff, 0xa6, 0x9d, 0xa7, 0x32, 0x3b, 0x01, 0x00, 0x00, +var fileDescriptor_8991a374e1961121 = []byte{ + // 253 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x72, 0xa8, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0x97, 0x25, 0xe6, 0x64, 0xa6, 0x24, 0x96, 0xe4, 0x17, 0xe9, 0xa7, 0x17, 0x25, + 0xe6, 0x95, 0xe8, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0x99, 0xa0, 0x9a, 0xa0, 0x87, 0xc7, 0x04, + 0x3d, 0xb8, 0x09, 0x52, 0x92, 0xc9, 0xf9, 0xc5, 0xb9, 0xf9, 0xc5, 0xf1, 0x60, 0x33, 0xf4, 0x21, + 0x1c, 0x88, 0x81, 0x4a, 0xc9, 
0x5c, 0xac, 0xee, 0x20, 0xf3, 0x85, 0x8c, 0xb8, 0xd8, 0x13, 0x53, + 0x52, 0x8a, 0x52, 0x8b, 0x8b, 0x25, 0x18, 0x15, 0x18, 0x35, 0x38, 0x9d, 0x24, 0x2e, 0x6d, 0xd1, + 0x15, 0x81, 0xaa, 0x75, 0x84, 0xc8, 0x04, 0x97, 0x14, 0x65, 0xe6, 0xa5, 0x07, 0xc1, 0x14, 0x0a, + 0x09, 0x71, 0xb1, 0x94, 0x64, 0xe6, 0xa6, 0x4a, 0x30, 0x29, 0x30, 0x6a, 0x30, 0x07, 0x81, 0xd9, + 0x20, 0xb1, 0xcc, 0xbc, 0xb4, 0x7c, 0x09, 0x66, 0x90, 0x21, 0x41, 0x60, 0xb6, 0x53, 0xca, 0x89, + 0x47, 0x72, 0x8c, 0x17, 0x1e, 0xc9, 0x31, 0x3e, 0x78, 0x24, 0xc7, 0x38, 0xe1, 0xb1, 0x1c, 0xc3, + 0x85, 0xc7, 0x72, 0x0c, 0x37, 0x1e, 0xcb, 0x31, 0x44, 0x79, 0xa5, 0x67, 0x96, 0x64, 0x94, 0x26, + 0xe9, 0x25, 0xe7, 0xe7, 0xea, 0x43, 0xbc, 0xa6, 0x8b, 0x2d, 0x74, 0x74, 0x11, 0x9e, 0xd3, 0x85, + 0x86, 0x4f, 0x05, 0x52, 0x08, 0x95, 0x54, 0x16, 0xa4, 0x16, 0x27, 0xb1, 0x81, 0x7d, 0x64, 0x0c, + 0x08, 0x00, 0x00, 0xff, 0xff, 0x9f, 0xa9, 0xc7, 0x85, 0x66, 0x01, 0x00, 0x00, } func (m *Grant) Marshal() (dAtA []byte, err error) { diff --git a/x/validator/types/last_validator_power.pb.go b/x/validator/types/last_validator_power.pb.go index aa441be3d..1841b0f21 100644 --- a/x/validator/types/last_validator_power.pb.go +++ b/x/validator/types/last_validator_power.pb.go @@ -1,13 +1,13 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: validator/last_validator_power.proto +// source: zigbeealliance/distributedcomplianceledger/validator/last_validator_power.proto package types import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -32,7 +32,7 @@ type LastValidatorPower struct { func (m *LastValidatorPower) Reset() { *m = LastValidatorPower{} } func (*LastValidatorPower) ProtoMessage() {} func (*LastValidatorPower) Descriptor() ([]byte, []int) { - return fileDescriptor_830cb29c5288b722, []int{0} + return fileDescriptor_9fb3d0fe36b404c9, []int{0} } func (m *LastValidatorPower) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -66,29 +66,29 @@ func init() { } func init() { - proto.RegisterFile("validator/last_validator_power.proto", fileDescriptor_830cb29c5288b722) + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/validator/last_validator_power.proto", fileDescriptor_9fb3d0fe36b404c9) } -var fileDescriptor_830cb29c5288b722 = []byte{ - // 280 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x29, 0x4b, 0xcc, 0xc9, - 0x4c, 0x49, 0x2c, 0xc9, 0x2f, 0xd2, 0xcf, 0x49, 0x2c, 0x2e, 0x89, 0x87, 0x73, 0xe3, 0x0b, 0xf2, - 0xcb, 0x53, 0x8b, 0xf4, 0x0a, 0x8a, 0xf2, 0x4b, 0xf2, 0x85, 0x4c, 0xaa, 0x32, 0xd3, 0x93, 0x52, - 0x53, 0x13, 0x73, 0x72, 0x32, 0x13, 0xf3, 0x92, 0x53, 0xf5, 0x52, 0x32, 0x8b, 0x4b, 0x8a, 0x32, - 0x93, 0x4a, 0x4b, 0x52, 0x53, 0x92, 0xf3, 0x73, 0x0b, 0x20, 0xa2, 0x39, 0xa9, 0x29, 0xe9, 0xa9, - 0x45, 0x7a, 0x70, 0x13, 0xa4, 0x44, 0xd2, 0xf3, 0xd3, 0xf3, 0xc1, 0x06, 0xe8, 0x83, 0x58, 0x10, - 0xb3, 0xa4, 0x24, 0x93, 0xf3, 0x8b, 0x73, 0xf3, 0x8b, 0xe3, 0x21, 0x12, 0x10, 0x0e, 0x44, 0x4a, - 0x29, 0x83, 0x4b, 0xc8, 0x27, 0xb1, 0xb8, 0x24, 0x0c, 0x66, 0x42, 0x00, 0xc8, 0x09, 0x42, 0x7a, - 0x5c, 
0xac, 0xf9, 0xe5, 0x79, 0xa9, 0x45, 0x12, 0x8c, 0x0a, 0x8c, 0x1a, 0x9c, 0x4e, 0x12, 0x97, - 0xb6, 0xe8, 0x8a, 0x40, 0xb5, 0x39, 0xa6, 0xa4, 0x14, 0xa5, 0x16, 0x17, 0x07, 0x97, 0x14, 0x65, - 0xe6, 0xa5, 0x07, 0x41, 0x94, 0x09, 0x89, 0x70, 0xb1, 0x82, 0xdd, 0x2e, 0xc1, 0xa4, 0xc0, 0xa8, - 0xc1, 0x1a, 0x04, 0xe1, 0x58, 0xf1, 0x74, 0x2c, 0x90, 0x67, 0x98, 0xb1, 0x40, 0x9e, 0xe1, 0xc5, - 0x02, 0x79, 0x06, 0xa7, 0x94, 0x13, 0x8f, 0xe4, 0x18, 0x2f, 0x3c, 0x92, 0x63, 0x7c, 0xf0, 0x48, - 0x8e, 0x71, 0xc2, 0x63, 0x39, 0x86, 0x0b, 0x8f, 0xe5, 0x18, 0x6e, 0x3c, 0x96, 0x63, 0x88, 0xf2, - 0x4a, 0xcf, 0x2c, 0xc9, 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x87, 0xf8, 0x5a, 0x17, 0xe6, - 0x6d, 0x7d, 0x24, 0x6f, 0xeb, 0x22, 0xfc, 0xad, 0x0b, 0xf1, 0xb8, 0x7e, 0x85, 0x3e, 0x22, 0x2c, - 0x4b, 0x2a, 0x0b, 0x52, 0x8b, 0x93, 0xd8, 0xc0, 0xde, 0x32, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, - 0x68, 0x1e, 0x4e, 0x66, 0x65, 0x01, 0x00, 0x00, +var fileDescriptor_9fb3d0fe36b404c9 = []byte{ + // 282 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xf2, 0xaf, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0x97, 0x25, 0xe6, 0x64, 0xa6, 0x24, 0x96, 0xe4, 0x17, 0xe9, 0xe7, 0x24, 0x16, + 0x97, 0xc4, 0xc3, 0xb9, 0xf1, 0x05, 0xf9, 0xe5, 0xa9, 0x45, 0x7a, 0x05, 0x45, 0xf9, 0x25, 0xf9, + 0x42, 0x26, 0xa8, 0x06, 0xea, 0xe1, 0x31, 0x50, 0x0f, 0x6e, 0x82, 0x94, 0x48, 0x7a, 0x7e, 0x7a, + 0x3e, 0xd8, 0x00, 0x7d, 0x10, 0x0b, 0x62, 0x96, 0x94, 0x64, 0x72, 0x7e, 0x71, 0x6e, 0x7e, 0x71, + 0x3c, 0x44, 0x02, 0xc2, 0x81, 0x48, 0x29, 0x65, 0x70, 0x09, 0xf9, 0x24, 0x16, 0x97, 0x84, 0xc1, + 0x4c, 0x08, 0x00, 0x39, 0x41, 0x48, 0x8f, 0x8b, 0x35, 0xbf, 0x3c, 0x2f, 0xb5, 0x48, 0x82, 0x51, + 0x81, 0x51, 0x83, 0xd3, 0x49, 0xe2, 0xd2, 0x16, 0x5d, 0x11, 0xa8, 0x36, 0xc7, 0x94, 0x94, 0xa2, 
+ 0xd4, 0xe2, 0xe2, 0xe0, 0x92, 0xa2, 0xcc, 0xbc, 0xf4, 0x20, 0x88, 0x32, 0x21, 0x11, 0x2e, 0x56, + 0xb0, 0xdb, 0x25, 0x98, 0x14, 0x18, 0x35, 0x58, 0x83, 0x20, 0x1c, 0x2b, 0x9e, 0x8e, 0x05, 0xf2, + 0x0c, 0x33, 0x16, 0xc8, 0x33, 0xbc, 0x58, 0x20, 0xcf, 0xe0, 0x94, 0x72, 0xe2, 0x91, 0x1c, 0xe3, + 0x85, 0x47, 0x72, 0x8c, 0x0f, 0x1e, 0xc9, 0x31, 0x4e, 0x78, 0x2c, 0xc7, 0x70, 0xe1, 0xb1, 0x1c, + 0xc3, 0x8d, 0xc7, 0x72, 0x0c, 0x51, 0x5e, 0xe9, 0x99, 0x25, 0x19, 0xa5, 0x49, 0x7a, 0xc9, 0xf9, + 0xb9, 0xfa, 0x10, 0x5f, 0xeb, 0x62, 0x0b, 0x47, 0x5d, 0x84, 0xbf, 0x75, 0xa1, 0x21, 0x59, 0x81, + 0x14, 0x96, 0x25, 0x95, 0x05, 0xa9, 0xc5, 0x49, 0x6c, 0x60, 0x6f, 0x19, 0x03, 0x02, 0x00, 0x00, + 0xff, 0xff, 0xdf, 0x44, 0x3d, 0x14, 0x90, 0x01, 0x00, 0x00, } func (m *LastValidatorPower) Marshal() (dAtA []byte, err error) { diff --git a/x/validator/types/message_approve_disable_validator.go b/x/validator/types/message_approve_disable_validator.go index 2581a3311..4ff002212 100644 --- a/x/validator/types/message_approve_disable_validator.go +++ b/x/validator/types/message_approve_disable_validator.go @@ -3,6 +3,7 @@ package types import ( "time" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" @@ -47,12 +48,12 @@ func (msg *MsgApproveDisableValidator) GetSignBytes() []byte { func (msg *MsgApproveDisableValidator) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) } _, err = sdk.ValAddressFromBech32(msg.Address) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid validator address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid validator address (%s)", err) } err = 
validator.Validate(msg) diff --git a/x/validator/types/message_create_validator.go b/x/validator/types/message_create_validator.go index a3c61c337..9862466b2 100644 --- a/x/validator/types/message_create_validator.go +++ b/x/validator/types/message_create_validator.go @@ -1,10 +1,12 @@ package types import ( + "cosmossdk.io/errors" codectypes "github.com/cosmos/cosmos-sdk/codec/types" cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" ) @@ -60,24 +62,24 @@ func (msg *MsgCreateValidator) GetSignBytes() []byte { func (msg *MsgCreateValidator) ValidateBasic() error { accAddr, err := sdk.ValAddressFromBech32(msg.Signer) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid signer address (%s)", err) } if accAddr.Empty() { - return sdkerrors.Wrap(sdkerrors.ErrInvalidAddress, "Invalid Signer: it cannot be empty") + return errors.Wrap(sdkerrors.ErrInvalidAddress, "Invalid Signer: it cannot be empty") } if msg.PubKey == nil { - return sdkerrors.Wrap(sdkerrors.ErrInvalidPubKey, "Invalid Validator PubKey: it cannot be empty") + return errors.Wrap(sdkerrors.ErrInvalidPubKey, "Invalid Validator PubKey: it cannot be empty") } _, err2 := msg.PubKey.GetCachedValue().(cryptotypes.PubKey) if !err2 { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidType, "expecting cryptotypes.PubKey for PubKey, got %T", err) + return errors.Wrapf(sdkerrors.ErrInvalidType, "expecting cryptotypes.PubKey for PubKey, got %T", err) } if msg.Description == (Description{}) { - return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "empty description") + return errors.Wrap(sdkerrors.ErrInvalidRequest, "empty description") } if err := msg.Description.Validate(); err != nil { diff --git 
a/x/validator/types/message_disable_validator.go b/x/validator/types/message_disable_validator.go index 47eb23b73..5c6141962 100644 --- a/x/validator/types/message_disable_validator.go +++ b/x/validator/types/message_disable_validator.go @@ -1,6 +1,7 @@ package types import ( + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" @@ -42,7 +43,7 @@ func (msg *MsgDisableValidator) GetSignBytes() []byte { func (msg *MsgDisableValidator) ValidateBasic() error { _, err := sdk.ValAddressFromBech32(msg.Creator) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid validator address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid validator address (%s)", err) } err = validator.Validate(msg) diff --git a/x/validator/types/message_enable_validator.go b/x/validator/types/message_enable_validator.go index 4dac7bbd2..9ada276fa 100644 --- a/x/validator/types/message_enable_validator.go +++ b/x/validator/types/message_enable_validator.go @@ -1,6 +1,7 @@ package types import ( + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" @@ -42,7 +43,7 @@ func (msg *MsgEnableValidator) GetSignBytes() []byte { func (msg *MsgEnableValidator) ValidateBasic() error { _, err := sdk.ValAddressFromBech32(msg.Creator) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid validator address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid validator address (%s)", err) } err = validator.Validate(msg) diff --git a/x/validator/types/message_propose_disable_validator.go b/x/validator/types/message_propose_disable_validator.go index ba56fda54..344e95188 100644 --- a/x/validator/types/message_propose_disable_validator.go +++ 
b/x/validator/types/message_propose_disable_validator.go @@ -3,6 +3,7 @@ package types import ( "time" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" @@ -47,12 +48,12 @@ func (msg *MsgProposeDisableValidator) GetSignBytes() []byte { func (msg *MsgProposeDisableValidator) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) } _, err = sdk.ValAddressFromBech32(msg.Address) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid validator address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid validator address (%s)", err) } err = validator.Validate(msg) diff --git a/x/validator/types/message_reject_disable_node.go b/x/validator/types/message_reject_disable_node.go index 61c7824ad..e458dbbb3 100644 --- a/x/validator/types/message_reject_disable_node.go +++ b/x/validator/types/message_reject_disable_node.go @@ -3,6 +3,7 @@ package types import ( "time" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" ) @@ -46,7 +47,7 @@ func (msg *MsgRejectDisableValidator) GetSignBytes() []byte { func (msg *MsgRejectDisableValidator) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) } return nil diff --git a/x/validator/types/proposed_disable_validator.pb.go b/x/validator/types/proposed_disable_validator.pb.go index 042a1ae3d..73aa60b09 100644 --- a/x/validator/types/proposed_disable_validator.pb.go +++ 
b/x/validator/types/proposed_disable_validator.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: validator/proposed_disable_validator.proto +// source: zigbeealliance/distributedcomplianceledger/validator/proposed_disable_validator.proto package types import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -34,7 +34,7 @@ func (m *ProposedDisableValidator) Reset() { *m = ProposedDisableValidat func (m *ProposedDisableValidator) String() string { return proto.CompactTextString(m) } func (*ProposedDisableValidator) ProtoMessage() {} func (*ProposedDisableValidator) Descriptor() ([]byte, []int) { - return fileDescriptor_f7ffedaeb03ca643, []int{0} + return fileDescriptor_f9591474ec756c15, []int{0} } func (m *ProposedDisableValidator) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -96,31 +96,31 @@ func init() { } func init() { - proto.RegisterFile("validator/proposed_disable_validator.proto", fileDescriptor_f7ffedaeb03ca643) + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/validator/proposed_disable_validator.proto", fileDescriptor_f9591474ec756c15) } -var fileDescriptor_f7ffedaeb03ca643 = []byte{ - // 311 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x91, 0xc1, 0x4a, 0x03, 0x31, - 0x10, 0x86, 0xbb, 0xad, 0x58, 0xba, 0xde, 0x16, 0x85, 0xd8, 0x43, 0x28, 0x9e, 0x8a, 0xb0, 0x09, - 0x54, 0x6f, 0x9e, 0x2c, 0x82, 0xe0, 0x49, 0x2a, 0x0a, 0x7a, 0x29, 0xd9, 0x64, 0x58, 0x23, 0x69, - 0x13, 0x92, 0xb4, 0xa8, 0x4f, 0xe1, 0xc3, 0xf4, 0x21, 0x3c, 0x16, 0x4f, 0x1e, 0xa5, 0x7d, 0x11, - 0xd9, 0x66, 0xb7, 0xeb, 0x4d, 0xd0, 0xe3, 0xce, 0xcc, 0xff, 0x7d, 0xb3, 0x99, 0xf8, 0x78, 0xce, - 0x94, 0x14, 0xcc, 0x6b, 0x4b, 0x8d, 0xd5, 0x46, 0x3b, 0x10, 0x63, 0x21, 0x1d, 0xcb, 0x14, 0x8c, - 0xb7, 0x2d, 0x62, 0xac, 0xf6, 0x3a, 0x39, 0x7d, 0x95, 0x79, 
0x06, 0xc0, 0x94, 0x92, 0x6c, 0xca, - 0x81, 0x08, 0xe9, 0xbc, 0x95, 0xd9, 0xcc, 0x83, 0xe0, 0x7a, 0x62, 0x42, 0x55, 0x81, 0xc8, 0xc1, - 0x92, 0x6d, 0xb6, 0x7b, 0xc8, 0xb5, 0x9b, 0x68, 0x37, 0xde, 0x30, 0x68, 0xf8, 0x08, 0xc0, 0xee, - 0x41, 0x2d, 0xcf, 0x2d, 0x9b, 0xfa, 0x50, 0x3e, 0x5a, 0x34, 0x63, 0x74, 0x5d, 0x2e, 0x73, 0x11, - 0x76, 0xb9, 0xab, 0x06, 0x93, 0x41, 0xdc, 0x66, 0x42, 0x58, 0x70, 0x0e, 0x45, 0xbd, 0xa8, 0xdf, - 0x19, 0xa2, 0x8f, 0x45, 0xba, 0x5f, 0x62, 0xcf, 0x43, 0xe7, 0xc6, 0x5b, 0x39, 0xcd, 0x47, 0xd5, - 0x60, 0x91, 0xe1, 0x16, 0x8a, 0x38, 0x6a, 0xfe, 0x96, 0x29, 0x07, 0x93, 0xfb, 0xb8, 0xc3, 0x8c, - 0xb1, 0x7a, 0xce, 0x94, 0x43, 0xad, 0x5e, 0xab, 0xbf, 0x37, 0x38, 0x23, 0x7f, 0x79, 0x00, 0x72, - 0x59, 0xfc, 0xda, 0xa8, 0xa6, 0x25, 0xb7, 0x71, 0xdb, 0xc2, 0x13, 0x70, 0xef, 0xd0, 0xce, 0xff, - 0xc1, 0x15, 0x6b, 0x28, 0xde, 0x57, 0x38, 0x5a, 0xae, 0x70, 0xf4, 0xb5, 0xc2, 0xd1, 0xdb, 0x1a, - 0x37, 0x96, 0x6b, 0xdc, 0xf8, 0x5c, 0xe3, 0xc6, 0xc3, 0x55, 0x2e, 0xfd, 0xe3, 0x2c, 0x23, 0x5c, - 0x4f, 0x68, 0x30, 0xa5, 0x95, 0x8a, 0xfe, 0x50, 0xa5, 0xb5, 0x2b, 0x0d, 0x32, 0xfa, 0x4c, 0xeb, - 0x13, 0xf9, 0x17, 0x03, 0x2e, 0xdb, 0xdd, 0xdc, 0xe8, 0xe4, 0x3b, 0x00, 0x00, 0xff, 0xff, 0xb6, - 0xd3, 0xb2, 0xf7, 0x39, 0x02, 0x00, 0x00, +var fileDescriptor_f9591474ec756c15 = []byte{ + // 316 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x91, 0xb1, 0x4a, 0x03, 0x31, + 0x1c, 0xc6, 0x7b, 0xad, 0x58, 0x7a, 0x6e, 0x87, 0x43, 0xec, 0x10, 0x8a, 0x53, 0x97, 0xcb, 0x41, + 0x75, 0x73, 0xd1, 0x22, 0x08, 0x4e, 0x52, 0xa9, 0xa0, 0x4b, 0xc9, 0x25, 0x7f, 0xce, 0x48, 0xda, + 0x84, 0x24, 0x2d, 0xea, 0x53, 0xf8, 0x30, 0x7d, 0x08, 0xc7, 0xe2, 0xe4, 0x28, 0xbd, 0x17, 0x91, + 0x6b, 0xee, 0x5a, 0x05, 0x51, 0xa8, 0x63, 0xf2, 0xff, 0xbe, 0xdf, 0xff, 0x4b, 0xbe, 0x70, 0xf8, + 0x2c, 0xb2, 0x14, 0x80, 0x4a, 0x29, 0xe8, 0x84, 0x41, 0xc2, 0x85, 0x75, 0x46, 0xa4, 0x53, 0x07, + 0x9c, 0xa9, 0xb1, 0xf6, 0xb7, 0x12, 0x78, 0x06, 0x26, 
0x99, 0x51, 0x29, 0x38, 0x75, 0xca, 0x24, + 0xda, 0x28, 0xad, 0x2c, 0xf0, 0x11, 0x17, 0x96, 0xa6, 0x12, 0x46, 0xeb, 0x11, 0xd1, 0x46, 0x39, + 0x15, 0x1d, 0x7f, 0xc7, 0x92, 0x5f, 0xb0, 0x64, 0xed, 0x6d, 0x1f, 0x30, 0x65, 0xc7, 0xca, 0x8e, + 0x56, 0x8c, 0xc4, 0x1f, 0x3c, 0xb0, 0x7d, 0xba, 0x55, 0xce, 0xcc, 0xd0, 0x89, 0xf3, 0x84, 0xc3, + 0x79, 0x3d, 0x44, 0x57, 0x65, 0xee, 0x73, 0x1f, 0xfb, 0xa6, 0x12, 0x46, 0xbd, 0xb0, 0x49, 0x39, + 0x37, 0x60, 0x2d, 0x0a, 0x3a, 0x41, 0xb7, 0xd5, 0x47, 0x6f, 0xf3, 0x78, 0xbf, 0x4c, 0x70, 0xe6, + 0x27, 0xd7, 0xce, 0x88, 0x49, 0x36, 0xa8, 0x84, 0x85, 0x87, 0x19, 0x28, 0xec, 0xa8, 0xfe, 0x97, + 0xa7, 0x14, 0x46, 0xb7, 0x61, 0x8b, 0x6a, 0x6d, 0xd4, 0x8c, 0x4a, 0x8b, 0x1a, 0x9d, 0x46, 0x77, + 0xaf, 0x77, 0x42, 0xb6, 0xf9, 0x2b, 0x72, 0x51, 0x3c, 0x6d, 0xb0, 0xa1, 0x45, 0xc3, 0xb0, 0x69, + 0xe0, 0x01, 0x98, 0xb3, 0x68, 0xe7, 0xff, 0xe0, 0x8a, 0xd5, 0xe7, 0xaf, 0x4b, 0x1c, 0x2c, 0x96, + 0x38, 0xf8, 0x58, 0xe2, 0xe0, 0x25, 0xc7, 0xb5, 0x45, 0x8e, 0x6b, 0xef, 0x39, 0xae, 0xdd, 0x5d, + 0x66, 0xc2, 0xdd, 0x4f, 0x53, 0xc2, 0xd4, 0x38, 0xf1, 0x9b, 0xe2, 0x9f, 0xea, 0x89, 0x37, 0xbb, + 0xe2, 0xb2, 0xa0, 0xc7, 0x2f, 0x15, 0xb9, 0x27, 0x0d, 0x36, 0xdd, 0x5d, 0x75, 0x74, 0xf4, 0x19, + 0x00, 0x00, 0xff, 0xff, 0xd5, 0xca, 0x13, 0xb9, 0x8f, 0x02, 0x00, 0x00, } func (m *ProposedDisableValidator) Marshal() (dAtA []byte, err error) { diff --git a/x/validator/types/query.pb.go b/x/validator/types/query.pb.go index 00bae9a7a..66de9b69e 100644 --- a/x/validator/types/query.pb.go +++ b/x/validator/types/query.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: validator/query.proto +// source: zigbeealliance/distributedcomplianceledger/validator/query.proto package types @@ -8,9 +8,9 @@ import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" query "github.com/cosmos/cosmos-sdk/types/query" - _ "github.com/gogo/protobuf/gogoproto" - grpc1 "github.com/gogo/protobuf/grpc" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + grpc1 "github.com/cosmos/gogoproto/grpc" + proto "github.com/cosmos/gogoproto/proto" _ "google.golang.org/genproto/googleapis/api/annotations" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" @@ -39,7 +39,7 @@ func (m *QueryGetValidatorRequest) Reset() { *m = QueryGetValidatorReque func (m *QueryGetValidatorRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetValidatorRequest) ProtoMessage() {} func (*QueryGetValidatorRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_31b4d73ed8fedd8d, []int{0} + return fileDescriptor_383eb6436ed732ef, []int{0} } func (m *QueryGetValidatorRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -83,7 +83,7 @@ func (m *QueryGetValidatorResponse) Reset() { *m = QueryGetValidatorResp func (m *QueryGetValidatorResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetValidatorResponse) ProtoMessage() {} func (*QueryGetValidatorResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_31b4d73ed8fedd8d, []int{1} + return fileDescriptor_383eb6436ed732ef, []int{1} } func (m *QueryGetValidatorResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -127,7 +127,7 @@ func (m *QueryAllValidatorRequest) Reset() { *m = QueryAllValidatorReque func (m *QueryAllValidatorRequest) String() string { return proto.CompactTextString(m) } func (*QueryAllValidatorRequest) ProtoMessage() {} func (*QueryAllValidatorRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_31b4d73ed8fedd8d, []int{2} + return fileDescriptor_383eb6436ed732ef, 
[]int{2} } func (m *QueryAllValidatorRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -172,7 +172,7 @@ func (m *QueryAllValidatorResponse) Reset() { *m = QueryAllValidatorResp func (m *QueryAllValidatorResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllValidatorResponse) ProtoMessage() {} func (*QueryAllValidatorResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_31b4d73ed8fedd8d, []int{3} + return fileDescriptor_383eb6436ed732ef, []int{3} } func (m *QueryAllValidatorResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -223,7 +223,7 @@ func (m *QueryGetLastValidatorPowerRequest) Reset() { *m = QueryGetLastV func (m *QueryGetLastValidatorPowerRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetLastValidatorPowerRequest) ProtoMessage() {} func (*QueryGetLastValidatorPowerRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_31b4d73ed8fedd8d, []int{4} + return fileDescriptor_383eb6436ed732ef, []int{4} } func (m *QueryGetLastValidatorPowerRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -267,7 +267,7 @@ func (m *QueryGetLastValidatorPowerResponse) Reset() { *m = QueryGetLast func (m *QueryGetLastValidatorPowerResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetLastValidatorPowerResponse) ProtoMessage() {} func (*QueryGetLastValidatorPowerResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_31b4d73ed8fedd8d, []int{5} + return fileDescriptor_383eb6436ed732ef, []int{5} } func (m *QueryGetLastValidatorPowerResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -311,7 +311,7 @@ func (m *QueryAllLastValidatorPowerRequest) Reset() { *m = QueryAllLastV func (m *QueryAllLastValidatorPowerRequest) String() string { return proto.CompactTextString(m) } func (*QueryAllLastValidatorPowerRequest) ProtoMessage() {} func (*QueryAllLastValidatorPowerRequest) Descriptor() ([]byte, []int) { - return 
fileDescriptor_31b4d73ed8fedd8d, []int{6} + return fileDescriptor_383eb6436ed732ef, []int{6} } func (m *QueryAllLastValidatorPowerRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -356,7 +356,7 @@ func (m *QueryAllLastValidatorPowerResponse) Reset() { *m = QueryAllLast func (m *QueryAllLastValidatorPowerResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllLastValidatorPowerResponse) ProtoMessage() {} func (*QueryAllLastValidatorPowerResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_31b4d73ed8fedd8d, []int{7} + return fileDescriptor_383eb6436ed732ef, []int{7} } func (m *QueryAllLastValidatorPowerResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -409,7 +409,7 @@ func (m *QueryGetProposedDisableValidatorRequest) Reset() { func (m *QueryGetProposedDisableValidatorRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetProposedDisableValidatorRequest) ProtoMessage() {} func (*QueryGetProposedDisableValidatorRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_31b4d73ed8fedd8d, []int{8} + return fileDescriptor_383eb6436ed732ef, []int{8} } func (m *QueryGetProposedDisableValidatorRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -455,7 +455,7 @@ func (m *QueryGetProposedDisableValidatorResponse) Reset() { func (m *QueryGetProposedDisableValidatorResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetProposedDisableValidatorResponse) ProtoMessage() {} func (*QueryGetProposedDisableValidatorResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_31b4d73ed8fedd8d, []int{9} + return fileDescriptor_383eb6436ed732ef, []int{9} } func (m *QueryGetProposedDisableValidatorResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -501,7 +501,7 @@ func (m *QueryAllProposedDisableValidatorRequest) Reset() { func (m *QueryAllProposedDisableValidatorRequest) String() string { return proto.CompactTextString(m) } func 
(*QueryAllProposedDisableValidatorRequest) ProtoMessage() {} func (*QueryAllProposedDisableValidatorRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_31b4d73ed8fedd8d, []int{10} + return fileDescriptor_383eb6436ed732ef, []int{10} } func (m *QueryAllProposedDisableValidatorRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -548,7 +548,7 @@ func (m *QueryAllProposedDisableValidatorResponse) Reset() { func (m *QueryAllProposedDisableValidatorResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllProposedDisableValidatorResponse) ProtoMessage() {} func (*QueryAllProposedDisableValidatorResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_31b4d73ed8fedd8d, []int{11} + return fileDescriptor_383eb6436ed732ef, []int{11} } func (m *QueryAllProposedDisableValidatorResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -599,7 +599,7 @@ func (m *QueryGetDisabledValidatorRequest) Reset() { *m = QueryGetDisabl func (m *QueryGetDisabledValidatorRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetDisabledValidatorRequest) ProtoMessage() {} func (*QueryGetDisabledValidatorRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_31b4d73ed8fedd8d, []int{12} + return fileDescriptor_383eb6436ed732ef, []int{12} } func (m *QueryGetDisabledValidatorRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -643,7 +643,7 @@ func (m *QueryGetDisabledValidatorResponse) Reset() { *m = QueryGetDisab func (m *QueryGetDisabledValidatorResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetDisabledValidatorResponse) ProtoMessage() {} func (*QueryGetDisabledValidatorResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_31b4d73ed8fedd8d, []int{13} + return fileDescriptor_383eb6436ed732ef, []int{13} } func (m *QueryGetDisabledValidatorResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -687,7 +687,7 @@ func (m 
*QueryAllDisabledValidatorRequest) Reset() { *m = QueryAllDisabl func (m *QueryAllDisabledValidatorRequest) String() string { return proto.CompactTextString(m) } func (*QueryAllDisabledValidatorRequest) ProtoMessage() {} func (*QueryAllDisabledValidatorRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_31b4d73ed8fedd8d, []int{14} + return fileDescriptor_383eb6436ed732ef, []int{14} } func (m *QueryAllDisabledValidatorRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -732,7 +732,7 @@ func (m *QueryAllDisabledValidatorResponse) Reset() { *m = QueryAllDisab func (m *QueryAllDisabledValidatorResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllDisabledValidatorResponse) ProtoMessage() {} func (*QueryAllDisabledValidatorResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_31b4d73ed8fedd8d, []int{15} + return fileDescriptor_383eb6436ed732ef, []int{15} } func (m *QueryAllDisabledValidatorResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -785,7 +785,7 @@ func (m *QueryGetRejectedDisableValidatorRequest) Reset() { func (m *QueryGetRejectedDisableValidatorRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetRejectedDisableValidatorRequest) ProtoMessage() {} func (*QueryGetRejectedDisableValidatorRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_31b4d73ed8fedd8d, []int{16} + return fileDescriptor_383eb6436ed732ef, []int{16} } func (m *QueryGetRejectedDisableValidatorRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -831,7 +831,7 @@ func (m *QueryGetRejectedDisableValidatorResponse) Reset() { func (m *QueryGetRejectedDisableValidatorResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetRejectedDisableValidatorResponse) ProtoMessage() {} func (*QueryGetRejectedDisableValidatorResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_31b4d73ed8fedd8d, []int{17} + return fileDescriptor_383eb6436ed732ef, []int{17} 
} func (m *QueryGetRejectedDisableValidatorResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -877,7 +877,7 @@ func (m *QueryAllRejectedDisableValidatorRequest) Reset() { func (m *QueryAllRejectedDisableValidatorRequest) String() string { return proto.CompactTextString(m) } func (*QueryAllRejectedDisableValidatorRequest) ProtoMessage() {} func (*QueryAllRejectedDisableValidatorRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_31b4d73ed8fedd8d, []int{18} + return fileDescriptor_383eb6436ed732ef, []int{18} } func (m *QueryAllRejectedDisableValidatorRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -924,7 +924,7 @@ func (m *QueryAllRejectedDisableValidatorResponse) Reset() { func (m *QueryAllRejectedDisableValidatorResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllRejectedDisableValidatorResponse) ProtoMessage() {} func (*QueryAllRejectedDisableValidatorResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_31b4d73ed8fedd8d, []int{19} + return fileDescriptor_383eb6436ed732ef, []int{19} } func (m *QueryAllRejectedDisableValidatorResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -990,77 +990,79 @@ func init() { proto.RegisterType((*QueryAllRejectedDisableValidatorResponse)(nil), "zigbeealliance.distributedcomplianceledger.validator.QueryAllRejectedDisableValidatorResponse") } -func init() { proto.RegisterFile("validator/query.proto", fileDescriptor_31b4d73ed8fedd8d) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/validator/query.proto", fileDescriptor_383eb6436ed732ef) +} -var fileDescriptor_31b4d73ed8fedd8d = []byte{ - // 1063 bytes of a gzipped FileDescriptorProto +var fileDescriptor_383eb6436ed732ef = []byte{ + // 1070 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x58, 0x41, 0x6f, 0xdc, 0x44, - 0x18, 0xcd, 0xec, 0x52, 0x50, 0x06, 0x2e, 0x19, 0xb6, 0x68, 0xbb, 0x84, 0x6d, 
0x19, 0xa5, 0xa4, - 0x44, 0xb2, 0xad, 0x14, 0x10, 0x17, 0xa4, 0x6a, 0x03, 0x22, 0xa8, 0x42, 0x10, 0xb6, 0x52, 0x82, - 0x90, 0xca, 0xca, 0xbb, 0x1e, 0x19, 0x97, 0x89, 0xc7, 0xb1, 0x9d, 0x96, 0x52, 0x15, 0xa1, 0x1e, - 0xb8, 0x70, 0x89, 0xe0, 0x0f, 0x70, 0xe0, 0xce, 0x85, 0x2b, 0x12, 0xc7, 0x72, 0x40, 0xaa, 0xca, - 0x85, 0x13, 0x42, 0x09, 0x17, 0x0e, 0xf4, 0x82, 0x90, 0x38, 0x56, 0xb6, 0xbf, 0x59, 0x6f, 0xd6, - 0x9e, 0x75, 0xb2, 0xf6, 0xee, 0xcd, 0xce, 0x8c, 0xdf, 0x37, 0xef, 0x7d, 0xcf, 0x9f, 0xdf, 0x06, - 0x9f, 0xbd, 0x69, 0x72, 0xc7, 0x32, 0x43, 0xe1, 0x1b, 0x7b, 0xfb, 0xcc, 0xbf, 0xad, 0x7b, 0xbe, - 0x08, 0x05, 0x79, 0xf5, 0x73, 0xc7, 0xee, 0x33, 0x66, 0x72, 0xee, 0x98, 0xee, 0x80, 0xe9, 0x96, - 0x13, 0x84, 0xbe, 0xd3, 0xdf, 0x0f, 0x99, 0x35, 0x10, 0xbb, 0x5e, 0xf2, 0x57, 0xce, 0x2c, 0x9b, - 0xf9, 0xfa, 0x10, 0xa1, 0xb5, 0x6c, 0x0b, 0x61, 0x73, 0x66, 0x98, 0x9e, 0x63, 0x98, 0xae, 0x2b, - 0x42, 0x33, 0x74, 0x84, 0x1b, 0x24, 0x98, 0xad, 0xb5, 0x81, 0x08, 0x76, 0x45, 0x60, 0xf4, 0xcd, - 0x80, 0x25, 0xc5, 0x8c, 0x9b, 0xeb, 0x7d, 0x16, 0x9a, 0xeb, 0x86, 0x67, 0xda, 0x8e, 0x1b, 0x6f, - 0x86, 0xbd, 0xe7, 0xd2, 0x63, 0x0d, 0xaf, 0x60, 0x69, 0x25, 0x5d, 0xe2, 0x66, 0x10, 0xf6, 0x86, - 0xb7, 0x3d, 0x4f, 0xdc, 0x62, 0x72, 0xd7, 0x5a, 0xba, 0xcb, 0xf3, 0x85, 0x27, 0x02, 0x66, 0xf5, - 0x2c, 0x27, 0x30, 0xfb, 0x9c, 0xf5, 0xc6, 0x11, 0x69, 0xba, 0x17, 0xb6, 0x58, 0x93, 0xf6, 0xf8, - 0xec, 0x06, 0x1b, 0x84, 0x39, 0x7b, 0x1a, 0xb6, 0xb0, 0x45, 0x7c, 0x69, 0x44, 0x57, 0x92, 0x4a, - 0x42, 0xbb, 0x97, 0x2c, 0x24, 0x37, 0xc9, 0x12, 0xbd, 0x8a, 0x9b, 0x1f, 0x44, 0x3a, 0x6c, 0xb2, - 0x70, 0x5b, 0x62, 0x75, 0xd9, 0xde, 0x3e, 0x0b, 0x42, 0xa2, 0xe3, 0x33, 0xe2, 0x96, 0xcb, 0xfc, - 0x26, 0xba, 0x80, 0x2e, 0x2d, 0x6e, 0x34, 0x1f, 0xfe, 0xa8, 0x35, 0xe0, 0xe1, 0x8e, 0x65, 0xf9, - 0x2c, 0x08, 0xae, 0x85, 0xbe, 0xe3, 0xda, 0xdd, 0x64, 0x1b, 0xfd, 0x12, 0xe1, 0x73, 0x39, 0x60, - 0x81, 0x27, 0xdc, 0x80, 0x91, 0x01, 0x5e, 0x1c, 0x9e, 0x36, 0x46, 0x7c, 0xfa, 0xf2, 0x15, 0x7d, - 0x9a, 0x1e, 0xeb, 
0x43, 0xec, 0x8d, 0x27, 0xee, 0xff, 0x71, 0x7e, 0xa1, 0x9b, 0xe2, 0xd2, 0x3e, - 0xd0, 0xe9, 0x70, 0x9e, 0xa1, 0xf3, 0x36, 0xc6, 0x69, 0x93, 0xe1, 0x04, 0x2f, 0xe9, 0x40, 0x28, - 0x72, 0x84, 0x9e, 0xd8, 0x0f, 0x1c, 0xa1, 0x6f, 0x99, 0x36, 0x83, 0x67, 0xbb, 0x23, 0x4f, 0xd2, - 0x5f, 0x24, 0xcd, 0xe3, 0x45, 0xf2, 0x69, 0xd6, 0x67, 0x41, 0x93, 0x6c, 0x1e, 0xa3, 0x52, 0x8b, - 0xa9, 0xac, 0x16, 0x52, 0x49, 0x4e, 0x78, 0x8c, 0xcb, 0x35, 0xfc, 0xa2, 0xec, 0xd8, 0xbb, 0x66, - 0x90, 0x76, 0x6d, 0x2b, 0xf2, 0xf1, 0xb4, 0x3e, 0xf8, 0x1e, 0x61, 0x3a, 0x09, 0x15, 0x94, 0xfa, - 0x02, 0x13, 0x9e, 0x59, 0x85, 0xbe, 0xbc, 0x33, 0x9d, 0x64, 0xd9, 0x6a, 0xa0, 0x5d, 0x4e, 0x25, - 0xfa, 0x29, 0x70, 0xef, 0x70, 0xae, 0xe6, 0x5e, 0x95, 0x69, 0xfe, 0x91, 0x9a, 0x28, 0xaa, 0x15, - 0x68, 0x52, 0x9f, 0x8f, 0x26, 0xd5, 0x19, 0xeb, 0x4d, 0xbc, 0x2a, 0x2d, 0xb0, 0x05, 0xc3, 0xef, - 0xad, 0x64, 0xb0, 0x65, 0xde, 0xcb, 0x26, 0x7e, 0xca, 0x4c, 0x6c, 0x94, 0x18, 0xac, 0x2b, 0x6f, - 0xe9, 0x4f, 0x08, 0x5f, 0x2a, 0x46, 0x01, 0xe9, 0x0e, 0x10, 0x6e, 0x7a, 0x8a, 0x4d, 0xd0, 0xb8, - 0xf7, 0xa6, 0x53, 0x50, 0x55, 0x1a, 0x74, 0x54, 0x56, 0xa5, 0x7b, 0x20, 0x42, 0x87, 0xf3, 0x22, - 0x11, 0xaa, 0xf2, 0xd9, 0x57, 0x35, 0x90, 0x6c, 0x62, 0xcd, 0x93, 0x49, 0x56, 0x9f, 0xbf, 0x64, - 0xd5, 0x19, 0xf0, 0x0d, 0x7c, 0x41, 0x5a, 0x07, 0x8a, 0x58, 0xa7, 0x70, 0xde, 0x77, 0x28, 0x1d, - 0x8c, 0x39, 0x8f, 0x83, 0x7e, 0x77, 0xf0, 0x92, 0x35, 0xbe, 0x08, 0xbd, 0xdb, 0x9c, 0x4e, 0xb7, - 0x4c, 0x2d, 0x10, 0x2c, 0x5b, 0x87, 0xde, 0x00, 0x82, 0x1d, 0xce, 0x95, 0x04, 0xab, 0x72, 0xd5, - 0xdf, 0x28, 0x9d, 0x95, 0xa7, 0x96, 0xa3, 0x3e, 0x0f, 0x39, 0xaa, 0x33, 0xce, 0x95, 0x74, 0x72, - 0x75, 0x21, 0x66, 0xa9, 0x5e, 0xda, 0xc6, 0xb1, 0x0f, 0xa3, 0xfc, 0xfc, 0xfd, 0x30, 0x32, 0xb5, - 0xd4, 0x08, 0xa0, 0xd9, 0x3d, 0x84, 0x97, 0x64, 0x9a, 0xab, 0x68, 0x5c, 0xa9, 0x6a, 0x4a, 0xed, - 0x32, 0xe5, 0x46, 0xe7, 0x54, 0x11, 0xe5, 0xaa, 0x1c, 0xf5, 0x3f, 0x4a, 0xe7, 0xd4, 0xd4, 0x22, - 0xd5, 0xe7, 0x28, 0x52, 0x65, 0x06, 0xbb, 0xfc, 0xf3, 
0xb3, 0xf8, 0x4c, 0x4c, 0x9d, 0x3c, 0x44, - 0x78, 0x31, 0x2d, 0x30, 0x25, 0x13, 0x55, 0x7c, 0x6f, 0xbd, 0x5f, 0x19, 0x5e, 0x42, 0x82, 0xae, - 0xdc, 0xfb, 0xed, 0xaf, 0x6f, 0x6b, 0x6d, 0xb2, 0x6c, 0x58, 0x03, 0x9e, 0xfe, 0x28, 0x32, 0x5c, - 0x61, 0xb1, 0xc0, 0xb8, 0x13, 0xbb, 0xff, 0x2e, 0xf9, 0x15, 0xe1, 0x67, 0x86, 0xcf, 0x76, 0x38, - 0x2f, 0xc5, 0x2b, 0x27, 0xc7, 0x97, 0xe2, 0x95, 0x17, 0xd9, 0xe9, 0x72, 0xcc, 0xeb, 0x39, 0xd2, - 0xc8, 0xe3, 0x45, 0xfe, 0x45, 0x98, 0x64, 0x33, 0x14, 0xd9, 0x29, 0xa7, 0xae, 0x32, 0x71, 0xb6, - 0x3e, 0xac, 0x1e, 0x18, 0x78, 0xae, 0xc5, 0x3c, 0x57, 0x08, 0x1d, 0xe3, 0x19, 0xe5, 0x40, 0x2d, - 0xfe, 0xe5, 0x9a, 0x76, 0xf1, 0x11, 0xc2, 0x67, 0xb3, 0x50, 0x51, 0x3b, 0x77, 0xca, 0xc9, 0x3f, - 0x1b, 0xe2, 0x13, 0x53, 0x35, 0xa5, 0x31, 0xf1, 0x65, 0xd2, 0x52, 0x13, 0x27, 0xdf, 0xd4, 0x70, - 0x53, 0x95, 0x5a, 0xc8, 0xf5, 0x72, 0x3d, 0x29, 0x08, 0x7f, 0xad, 0x8f, 0x67, 0x05, 0x0f, 0xfc, - 0x5f, 0x8f, 0xf9, 0xaf, 0x13, 0x63, 0x8c, 0xbf, 0x4c, 0x61, 0x1a, 0x7c, 0x4e, 0x35, 0x78, 0x93, - 0x21, 0x05, 0xdd, 0x25, 0x5f, 0xd7, 0xf0, 0xf3, 0x2a, 0xf4, 0xc8, 0x0b, 0xd7, 0xcb, 0xb5, 0x6c, - 0x96, 0xba, 0x9c, 0x20, 0xff, 0x52, 0x2d, 0xd6, 0x65, 0x95, 0x5c, 0x3c, 0x91, 0x2e, 0xe4, 0x3f, - 0x84, 0x97, 0x32, 0x89, 0x84, 0x6c, 0x97, 0x6b, 0x9e, 0x2a, 0xbb, 0xb5, 0x76, 0x2a, 0xc7, 0x05, - 0xd6, 0x46, 0xcc, 0xfa, 0x65, 0xb2, 0x3a, 0xc6, 0x5a, 0x66, 0xaa, 0x8c, 0x0b, 0x1e, 0x21, 0xdc, - 0xc8, 0xc0, 0x45, 0xed, 0xdf, 0x2e, 0xd7, 0x9f, 0x99, 0x50, 0x9f, 0x94, 0x50, 0xe9, 0xc5, 0x98, - 0xfa, 0x79, 0xf2, 0xc2, 0x44, 0xea, 0xe4, 0xa0, 0x86, 0x9b, 0xaa, 0x80, 0x50, 0x76, 0x16, 0x14, - 0x04, 0xac, 0xb2, 0xb3, 0xa0, 0x28, 0x4b, 0xd1, 0xd7, 0x62, 0x09, 0x0c, 0xa2, 0x8d, 0x49, 0x20, - 0x03, 0xcf, 0xf8, 0x2c, 0x80, 0xef, 0x41, 0x34, 0x09, 0x54, 0xd8, 0x15, 0x4c, 0x82, 0x59, 0xaa, - 0x72, 0x82, 0x84, 0xa9, 0x9c, 0x04, 0xf9, 0xaa, 0x6c, 0x58, 0xf7, 0x0f, 0xdb, 0xe8, 0xc1, 0x61, - 0x1b, 0xfd, 0x79, 0xd8, 0x46, 0x07, 0x47, 0xed, 0x85, 0x07, 0x47, 0xed, 0x85, 0xdf, 0x8f, 
0xda, - 0x0b, 0x1f, 0x5d, 0xb5, 0x9d, 0xf0, 0x93, 0xfd, 0xbe, 0x3e, 0x10, 0xbb, 0x46, 0x72, 0x64, 0x4d, - 0x9e, 0xd9, 0x18, 0x39, 0xb3, 0x96, 0x1e, 0x5a, 0x4b, 0x4e, 0x6d, 0x7c, 0x36, 0x52, 0x36, 0xbc, - 0xed, 0xb1, 0xa0, 0xff, 0x64, 0xfc, 0x2f, 0xda, 0x57, 0x1e, 0x07, 0x00, 0x00, 0xff, 0xff, 0x08, - 0x12, 0xe3, 0x6f, 0x21, 0x17, 0x00, 0x00, + 0x18, 0xcd, 0xec, 0x52, 0x50, 0x06, 0x2e, 0x19, 0x16, 0xb4, 0x5d, 0xc2, 0xb6, 0x8c, 0x5a, 0x52, + 0x2a, 0xd9, 0x56, 0x0a, 0x88, 0x0b, 0x52, 0xd9, 0x50, 0x11, 0x54, 0x01, 0x0d, 0x5b, 0x91, 0x20, + 0xa4, 0xb2, 0xf2, 0xae, 0x47, 0xc6, 0x65, 0xe2, 0x71, 0x6c, 0xa7, 0xa5, 0x54, 0x45, 0xa8, 0x07, + 0x2e, 0x5c, 0x22, 0xf8, 0x03, 0x1c, 0xb8, 0x73, 0xe1, 0x8a, 0xc4, 0xb1, 0x1c, 0x90, 0xaa, 0x72, + 0xe1, 0x84, 0x50, 0xc2, 0x85, 0x03, 0xbd, 0x20, 0x24, 0x8e, 0x95, 0xc7, 0x9f, 0xd7, 0xbb, 0x6b, + 0xcf, 0x6e, 0x62, 0x7b, 0x73, 0xb3, 0xd7, 0xf6, 0xfb, 0xe6, 0xbd, 0xef, 0xcd, 0xe7, 0xb7, 0xc6, + 0x6f, 0x7c, 0xee, 0xd8, 0x7d, 0xc6, 0x4c, 0xce, 0x1d, 0xd3, 0x1d, 0x30, 0xc3, 0x72, 0x82, 0xd0, + 0x77, 0xfa, 0xbb, 0x21, 0xb3, 0x06, 0x62, 0xdb, 0x8b, 0x7f, 0xe5, 0xcc, 0xb2, 0x99, 0x6f, 0xdc, + 0x30, 0xb9, 0x63, 0x99, 0xa1, 0xf0, 0x8d, 0x9d, 0x5d, 0xe6, 0xdf, 0xd2, 0x3d, 0x5f, 0x84, 0x82, + 0xbc, 0x32, 0x8e, 0xa0, 0x4f, 0x41, 0xd0, 0x87, 0x08, 0xad, 0x65, 0x5b, 0x08, 0x9b, 0x33, 0xc3, + 0xf4, 0x1c, 0xc3, 0x74, 0x5d, 0x11, 0x9a, 0xa1, 0x23, 0xdc, 0x20, 0xc6, 0x6c, 0x9d, 0x1f, 0x88, + 0x60, 0x5b, 0x04, 0x46, 0xdf, 0x0c, 0x58, 0x5c, 0xcc, 0xb8, 0xb1, 0xda, 0x67, 0xa1, 0xb9, 0x6a, + 0x78, 0xa6, 0xed, 0xb8, 0xf2, 0x66, 0xb8, 0xf7, 0x52, 0x21, 0x06, 0xc3, 0x23, 0x40, 0xb9, 0x52, + 0x08, 0x85, 0x9b, 0x41, 0xd8, 0x1b, 0x9e, 0xf6, 0x3c, 0x71, 0x93, 0x25, 0x80, 0x1f, 0x14, 0x02, + 0xf4, 0x7c, 0xe1, 0x89, 0x80, 0x59, 0x3d, 0xcb, 0x09, 0xcc, 0x3e, 0x67, 0xbd, 0xc9, 0x75, 0xbe, + 0x5b, 0x08, 0x16, 0xd0, 0xac, 0x8a, 0xe0, 0x7c, 0x76, 0x9d, 0x0d, 0xc2, 0x1c, 0xb8, 0x86, 0x2d, + 0x6c, 0x21, 0x0f, 0x8d, 0xe8, 0x08, 0x7e, 0x3d, 0x19, 0x77, 0xb3, 0x17, 0x5f, 0x88, 
0x4f, 0xe2, + 0x4b, 0xf4, 0x32, 0x6e, 0xbe, 0x1f, 0xb5, 0x77, 0x9d, 0x85, 0x9b, 0x09, 0x56, 0x97, 0xed, 0xec, + 0xb2, 0x20, 0x24, 0x3a, 0x3e, 0x21, 0x6e, 0xba, 0xcc, 0x6f, 0xa2, 0xd3, 0xe8, 0xdc, 0xe2, 0x5a, + 0xf3, 0xc1, 0x8f, 0x5a, 0x03, 0x1e, 0xee, 0x58, 0x96, 0xcf, 0x82, 0xe0, 0x6a, 0xe8, 0x3b, 0xae, + 0xdd, 0x8d, 0x6f, 0xa3, 0x5f, 0x22, 0x7c, 0x32, 0x07, 0x2c, 0xf0, 0x84, 0x1b, 0x30, 0x32, 0xc0, + 0x8b, 0xc3, 0xd5, 0x4a, 0xc4, 0x27, 0x2f, 0x5c, 0xd4, 0x8b, 0x58, 0x57, 0x1f, 0x62, 0xaf, 0x3d, + 0x76, 0xef, 0x8f, 0x53, 0x0b, 0xdd, 0x14, 0x97, 0xf6, 0x81, 0x4e, 0x87, 0xf3, 0x0c, 0x9d, 0xb7, + 0x30, 0x4e, 0xbd, 0x0b, 0x2b, 0x78, 0x51, 0x07, 0x42, 0x91, 0xd1, 0xf5, 0x78, 0x57, 0x81, 0xd1, + 0xf5, 0x0d, 0xd3, 0x66, 0xf0, 0x6c, 0x77, 0xe4, 0x49, 0xfa, 0x4b, 0x42, 0x73, 0xbc, 0x48, 0x3e, + 0xcd, 0xfa, 0x3c, 0x68, 0x92, 0xf5, 0x31, 0x2a, 0x35, 0x49, 0x65, 0x65, 0x26, 0x95, 0x78, 0x85, + 0x63, 0x5c, 0xae, 0xe2, 0x17, 0x92, 0x8e, 0xbd, 0x63, 0x06, 0x69, 0xd7, 0x36, 0xa2, 0x8d, 0x54, + 0xd4, 0x07, 0xdf, 0x23, 0x4c, 0xa7, 0xa1, 0x82, 0x52, 0x5f, 0x60, 0xc2, 0x33, 0x57, 0xa1, 0x2f, + 0x6f, 0x17, 0x93, 0x2c, 0x5b, 0x0d, 0xb4, 0xcb, 0xa9, 0x44, 0x3f, 0x05, 0xee, 0x1d, 0xce, 0xd5, + 0xdc, 0xab, 0x32, 0xcd, 0x3f, 0x89, 0x26, 0x8a, 0x6a, 0x33, 0x34, 0xa9, 0x1f, 0x8f, 0x26, 0xd5, + 0x19, 0xeb, 0x4d, 0xbc, 0x92, 0x58, 0x60, 0x03, 0x46, 0xea, 0xa5, 0x78, 0x06, 0x66, 0xf6, 0x65, + 0x13, 0x3f, 0x61, 0xc6, 0x36, 0x8a, 0x0d, 0xd6, 0x4d, 0x4e, 0xe9, 0x4f, 0x08, 0x9f, 0x9b, 0x8d, + 0x02, 0xd2, 0xed, 0x21, 0xdc, 0xf4, 0x14, 0x37, 0x41, 0xe3, 0xde, 0x2b, 0xa6, 0xa0, 0xaa, 0x34, + 0xe8, 0xa8, 0xac, 0x4a, 0x77, 0x40, 0x84, 0x0e, 0xe7, 0xb3, 0x44, 0xa8, 0xca, 0x67, 0x5f, 0xd5, + 0x40, 0xb2, 0xa9, 0x35, 0x0f, 0x27, 0x59, 0xfd, 0xf8, 0x25, 0xab, 0xce, 0x80, 0xaf, 0xe3, 0xd3, + 0x89, 0x75, 0xa0, 0x88, 0x75, 0x04, 0xe7, 0x7d, 0x87, 0xd2, 0xc1, 0x98, 0xf3, 0x38, 0xe8, 0x77, + 0x1b, 0x2f, 0x59, 0x93, 0x17, 0xa1, 0x77, 0xeb, 0xc5, 0x74, 0xcb, 0xd4, 0x02, 0xc1, 0xb2, 0x75, + 0xe8, 0x75, 0x20, 0xd8, 
0xe1, 0x5c, 0x49, 0xb0, 0x2a, 0x57, 0xfd, 0x8d, 0xd2, 0x59, 0x79, 0x64, + 0x39, 0xea, 0xc7, 0x21, 0x47, 0x75, 0xc6, 0xb9, 0x98, 0x4e, 0xae, 0x2e, 0xc4, 0x2c, 0xd5, 0xa6, + 0x6d, 0x8c, 0xbd, 0x18, 0x93, 0xd7, 0xdf, 0x0f, 0x23, 0x53, 0x4b, 0x8d, 0x00, 0x9a, 0xdd, 0x45, + 0x78, 0x29, 0x49, 0x73, 0x15, 0x8d, 0x2b, 0x55, 0xcd, 0x44, 0xbb, 0x4c, 0xb9, 0xd1, 0x39, 0x35, + 0x8b, 0x72, 0x55, 0x8e, 0xfa, 0x1f, 0xa5, 0x73, 0xaa, 0xb0, 0x48, 0xf5, 0x63, 0x14, 0xa9, 0x32, + 0x83, 0x5d, 0xf8, 0xf9, 0x69, 0x7c, 0x42, 0x52, 0x27, 0x0f, 0x10, 0x5e, 0x4c, 0x0b, 0x14, 0x64, + 0xa2, 0x8a, 0xef, 0xad, 0x2b, 0x95, 0xe1, 0xc5, 0x24, 0xe8, 0x99, 0xbb, 0xbf, 0xfd, 0xf5, 0x6d, + 0xad, 0x4d, 0x96, 0x0d, 0x6b, 0xc0, 0x47, 0xfe, 0x8d, 0xb8, 0xc2, 0x62, 0x81, 0x71, 0x5b, 0xba, + 0xff, 0x0e, 0xf9, 0x15, 0xe1, 0xa7, 0x86, 0xcf, 0x76, 0x38, 0x2f, 0xc5, 0x2b, 0x27, 0xc7, 0x97, + 0xe2, 0x95, 0x17, 0xd9, 0xe9, 0xb2, 0xe4, 0xf5, 0x2c, 0x69, 0xe4, 0xf1, 0x22, 0xff, 0x22, 0x4c, + 0xb2, 0x19, 0x8a, 0x6c, 0x95, 0x53, 0x57, 0x99, 0x38, 0x5b, 0x1f, 0x56, 0x0f, 0x0c, 0x3c, 0xcf, + 0x4b, 0x9e, 0x67, 0x08, 0x9d, 0xe0, 0x19, 0xe5, 0x40, 0x4d, 0xfe, 0x75, 0x4e, 0xbb, 0xf8, 0x10, + 0xe1, 0x67, 0xb2, 0x50, 0x51, 0x3b, 0xb7, 0xca, 0xc9, 0x3f, 0x1f, 0xe2, 0x53, 0x53, 0x35, 0xa5, + 0x92, 0xf8, 0x32, 0x69, 0xa9, 0x89, 0x93, 0x6f, 0x6a, 0xb8, 0xa9, 0x4a, 0x2d, 0xe4, 0x5a, 0xb9, + 0x9e, 0xcc, 0x08, 0x7f, 0xad, 0x8f, 0xe7, 0x05, 0x0f, 0xfc, 0x5f, 0x93, 0xfc, 0x57, 0x89, 0x31, + 0xc1, 0x3f, 0x49, 0x61, 0x1a, 0xbc, 0x4e, 0x35, 0xd8, 0xc9, 0x90, 0x82, 0xee, 0x90, 0xaf, 0x6b, + 0xf8, 0x39, 0x15, 0x7a, 0xe4, 0x85, 0x6b, 0xe5, 0x5a, 0x36, 0x4f, 0x5d, 0x0e, 0x91, 0x7f, 0xa9, + 0x26, 0x75, 0x59, 0x21, 0x67, 0x0f, 0xa5, 0x0b, 0xf9, 0x0f, 0xe1, 0xa5, 0x4c, 0x22, 0x21, 0x9b, + 0xe5, 0x9a, 0xa7, 0xca, 0x6e, 0xad, 0xad, 0xca, 0x71, 0x81, 0xb5, 0x21, 0x59, 0xbf, 0x44, 0x56, + 0x26, 0x58, 0x27, 0x99, 0x2a, 0xe3, 0x82, 0x87, 0x08, 0x37, 0x32, 0x70, 0x51, 0xfb, 0x37, 0xcb, + 0xf5, 0x67, 0x2e, 0xd4, 0xa7, 0x25, 0x54, 0x7a, 0x56, 0x52, 
0x3f, 0x45, 0x9e, 0x9f, 0x4a, 0x9d, + 0xec, 0xd5, 0x70, 0x53, 0x15, 0x10, 0xca, 0xce, 0x82, 0x19, 0x01, 0xab, 0xec, 0x2c, 0x98, 0x95, + 0xa5, 0xe8, 0xab, 0x52, 0x02, 0x83, 0x68, 0x13, 0x12, 0x24, 0x81, 0x67, 0x72, 0x16, 0xc0, 0xfb, + 0x20, 0x9a, 0x04, 0x2a, 0xec, 0x0a, 0x26, 0xc1, 0x3c, 0x55, 0x39, 0x44, 0xc2, 0x54, 0x4e, 0x82, + 0x7c, 0x55, 0xd6, 0xac, 0x7b, 0xfb, 0x6d, 0x74, 0x7f, 0xbf, 0x8d, 0xfe, 0xdc, 0x6f, 0xa3, 0xbd, + 0x83, 0xf6, 0xc2, 0xfd, 0x83, 0xf6, 0xc2, 0xef, 0x07, 0xed, 0x85, 0x8f, 0x2e, 0xdb, 0x4e, 0xf8, + 0xc9, 0x6e, 0x5f, 0x1f, 0x88, 0x6d, 0x23, 0x5e, 0xb2, 0x96, 0xf7, 0x6d, 0x57, 0x4b, 0x17, 0xad, + 0xc1, 0xd7, 0xdd, 0xcf, 0x46, 0xca, 0x86, 0xb7, 0x3c, 0x16, 0xf4, 0x1f, 0x97, 0x9f, 0x68, 0x5f, + 0x7e, 0x14, 0x00, 0x00, 0xff, 0xff, 0x90, 0xcf, 0xe8, 0x32, 0x23, 0x18, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -1484,7 +1486,7 @@ var _Query_serviceDesc = grpc.ServiceDesc{ }, }, Streams: []grpc.StreamDesc{}, - Metadata: "validator/query.proto", + Metadata: "zigbeealliance/distributedcomplianceledger/validator/query.proto", } func (m *QueryGetValidatorRequest) Marshal() (dAtA []byte, err error) { diff --git a/x/validator/types/query.pb.gw.go b/x/validator/types/query.pb.gw.go index a21de7909..5a82516b1 100644 --- a/x/validator/types/query.pb.gw.go +++ b/x/validator/types/query.pb.gw.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. -// source: validator/query.proto +// source: zigbeealliance/distributedcomplianceledger/validator/query.proto /* Package types is a reverse proxy. 
@@ -964,25 +964,25 @@ func RegisterQueryHandlerClient(ctx context.Context, mux *runtime.ServeMux, clie } var ( - pattern_Query_Validator_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "validator", "nodes", "owner"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_Validator_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "validator", "nodes", "owner"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_ValidatorAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "validator", "nodes"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_ValidatorAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "validator", "nodes"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_LastValidatorPower_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "validator", "last-powers", "owner"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_LastValidatorPower_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "validator", "last-powers", "owner"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_LastValidatorPowerAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "validator", "last-powers"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_LastValidatorPowerAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "validator", "last-powers"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_ProposedDisableValidator_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "validator", "proposed-disable-nodes", "address"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_ProposedDisableValidator_0 = 
runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "validator", "proposed-disable-nodes", "address"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_ProposedDisableValidatorAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "validator", "proposed-disable-nodes"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_ProposedDisableValidatorAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "validator", "proposed-disable-nodes"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_DisabledValidator_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "validator", "disabled-nodes", "address"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_DisabledValidator_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "validator", "disabled-nodes", "address"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_DisabledValidatorAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "validator", "disabled-nodes"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_DisabledValidatorAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "validator", "disabled-nodes"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_RejectedDisableValidator_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "validator", "rejected-disable-nodes", "owner"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_RejectedDisableValidator_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "validator", "rejected-disable-nodes", "owner"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_RejectedDisableValidatorAll_0 = 
runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "validator", "rejected-disable-nodes"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_RejectedDisableValidatorAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "validator", "rejected-disable-nodes"}, "", runtime.AssumeColonVerbOpt(false))) ) var ( diff --git a/x/validator/types/rejected_validator.pb.go b/x/validator/types/rejected_validator.pb.go index fcd4d20e8..020617bbb 100644 --- a/x/validator/types/rejected_validator.pb.go +++ b/x/validator/types/rejected_validator.pb.go @@ -1,13 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: validator/rejected_validator.proto +// source: zigbeealliance/distributedcomplianceledger/validator/rejected_validator.proto package types import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" - sdk "github.com/cosmos/cosmos-sdk/types" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -35,7 +34,7 @@ func (m *RejectedDisableValidator) Reset() { *m = RejectedDisableValidat func (m *RejectedDisableValidator) String() string { return proto.CompactTextString(m) } func (*RejectedDisableValidator) ProtoMessage() {} func (*RejectedDisableValidator) Descriptor() ([]byte, []int) { - return fileDescriptor_e8d82fd5546ec0be, []int{0} + return fileDescriptor_3d5230ea0750343d, []int{0} } func (m *RejectedDisableValidator) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -64,9 +63,11 @@ func (m *RejectedDisableValidator) XXX_DiscardUnknown() { var xxx_messageInfo_RejectedDisableValidator proto.InternalMessageInfo -func (m *RejectedDisableValidator) GetAddress() sdk.ValAddress { - valAddr, _ := sdk.ValAddressFromBech32(m.Address) - return valAddr +func (m *RejectedDisableValidator) GetAddress() string { + if m != nil { + return m.Address + } + return "" } func (m *RejectedDisableValidator) GetCreator() string { @@ 
-95,31 +96,31 @@ func init() { } func init() { - proto.RegisterFile("validator/rejected_validator.proto", fileDescriptor_e8d82fd5546ec0be) + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/validator/rejected_validator.proto", fileDescriptor_3d5230ea0750343d) } -var fileDescriptor_e8d82fd5546ec0be = []byte{ - // 307 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x91, 0x41, 0x4b, 0xf3, 0x30, - 0x18, 0xc7, 0xd7, 0xed, 0xe5, 0x1d, 0xab, 0xb7, 0xa2, 0x10, 0x77, 0x08, 0x63, 0xa7, 0x5d, 0x9a, - 0xc0, 0xf4, 0xe6, 0xc9, 0x21, 0x08, 0x1e, 0x27, 0x0a, 0x7a, 0x19, 0x69, 0xf2, 0x50, 0x23, 0x59, - 0x53, 0x92, 0x6c, 0xa8, 0x9f, 0xc2, 0x0f, 0xb3, 0x0f, 0xe1, 0x71, 0x78, 0xf2, 0x28, 0xed, 0x17, - 0x91, 0x2e, 0xed, 0xea, 0x4d, 0xd0, 0x63, 0x9e, 0x3c, 0xff, 0xdf, 0x2f, 0x79, 0x9e, 0x70, 0xbc, - 0x66, 0x4a, 0x0a, 0xe6, 0xb4, 0xa1, 0x06, 0x1e, 0x81, 0x3b, 0x10, 0x8b, 0x7d, 0x89, 0xe4, 0x46, - 0x3b, 0x1d, 0x9d, 0xbe, 0xc8, 0x34, 0x01, 0x60, 0x4a, 0x49, 0x96, 0x71, 0x20, 0x42, 0x5a, 0x67, - 0x64, 0xb2, 0x72, 0x20, 0xb8, 0x5e, 0xe6, 0xbe, 0xaa, 0x40, 0xa4, 0x60, 0xc8, 0x3e, 0x3b, 0x3c, - 0xe6, 0xda, 0x2e, 0xb5, 0x5d, 0xec, 0x18, 0xd4, 0x1f, 0x3c, 0x70, 0x78, 0xd4, 0x4a, 0x53, 0xc3, - 0x32, 0xe7, 0xcb, 0xe3, 0x4d, 0x37, 0x44, 0xf3, 0xfa, 0x11, 0x17, 0xd2, 0xb2, 0x44, 0xc1, 0x6d, - 0xd3, 0x18, 0x4d, 0xc3, 0x3e, 0x13, 0xc2, 0x80, 0xb5, 0x28, 0x18, 0x05, 0x93, 0xc1, 0x0c, 0xbd, - 0x6f, 0xe2, 0xc3, 0x1a, 0x7b, 0xee, 0x6f, 0xae, 0x9d, 0x91, 0x59, 0x3a, 0x6f, 0x1a, 0xab, 0x0c, - 0x37, 0x50, 0xc5, 0x51, 0xf7, 0xa7, 0x4c, 0xdd, 0x18, 0xdd, 0x85, 0x03, 0x96, 0xe7, 0x46, 0xaf, - 0x99, 0xb2, 0xa8, 0x37, 0xea, 0x4d, 0x0e, 0xa6, 0x67, 0xe4, 0x37, 0x03, 0x20, 0x97, 0xd5, 0xd7, - 0xe6, 0x2d, 0x2d, 0xba, 0x09, 0xfb, 0x7e, 0xc6, 0x16, 0xfd, 0xfb, 0x3b, 0xb8, 0x61, 0xcd, 0xc4, - 0x5b, 0x81, 0x83, 0x6d, 0x81, 0x83, 0xcf, 0x02, 0x07, 0xaf, 0x25, 0xee, 0x6c, 0x4b, 0xdc, 0xf9, - 0x28, 0x71, 0xe7, 0xfe, 0x2a, 0x95, 0xee, 0x61, 0x95, 
0x10, 0xae, 0x97, 0xd4, 0x9b, 0xe2, 0x46, - 0x45, 0xbf, 0xa9, 0xe2, 0xd6, 0x15, 0x7b, 0x19, 0x7d, 0xa2, 0xed, 0x8a, 0xdc, 0x73, 0x0e, 0x36, - 0xf9, 0xbf, 0xdb, 0xd1, 0xc9, 0x57, 0x00, 0x00, 0x00, 0xff, 0xff, 0xd9, 0xce, 0x16, 0x1a, 0x31, - 0x02, 0x00, 0x00, +var fileDescriptor_3d5230ea0750343d = []byte{ + // 312 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xf2, 0xad, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0x97, 0x25, 0xe6, 0x64, 0xa6, 0x24, 0x96, 0xe4, 0x17, 0xe9, 0x17, 0xa5, 0x66, + 0xa5, 0x26, 0x97, 0xa4, 0xa6, 0xc4, 0xc3, 0x85, 0xf4, 0x0a, 0x8a, 0xf2, 0x4b, 0xf2, 0x85, 0x4c, + 0x50, 0x8d, 0xd3, 0xc3, 0x63, 0x9c, 0x1e, 0x5c, 0xaf, 0x94, 0x64, 0x72, 0x7e, 0x71, 0x6e, 0x7e, + 0x71, 0x3c, 0xd8, 0x0c, 0x7d, 0x08, 0x07, 0x62, 0xa0, 0x94, 0x03, 0x59, 0xee, 0x4b, 0x2f, 0x4a, + 0xcc, 0x2b, 0x81, 0x98, 0xa0, 0xb4, 0x85, 0x89, 0x4b, 0x22, 0x08, 0xea, 0x5e, 0x97, 0xcc, 0xe2, + 0xc4, 0xa4, 0x9c, 0xd4, 0x30, 0x98, 0x42, 0x21, 0x23, 0x2e, 0xf6, 0xc4, 0x94, 0x94, 0xa2, 0xd4, + 0xe2, 0x62, 0x09, 0x46, 0x05, 0x46, 0x0d, 0x4e, 0x27, 0x89, 0x4b, 0x5b, 0x74, 0x45, 0xa0, 0x2e, + 0x70, 0x84, 0xc8, 0x04, 0x97, 0x14, 0x65, 0xe6, 0xa5, 0x07, 0xc1, 0x14, 0x82, 0xf4, 0x24, 0x17, + 0xa5, 0x82, 0xb4, 0x4b, 0x30, 0x11, 0xd2, 0x03, 0x55, 0x28, 0x14, 0xc9, 0xc5, 0x99, 0x58, 0x50, + 0x50, 0x94, 0x5f, 0x96, 0x98, 0x53, 0x2c, 0xc1, 0xac, 0xc0, 0xac, 0xc1, 0x6d, 0x64, 0xad, 0x47, + 0x4e, 0x58, 0xe9, 0xb9, 0x83, 0xbc, 0x16, 0x84, 0x30, 0x4d, 0x28, 0x94, 0x8b, 0x1d, 0x12, 0x1d, + 0xc5, 0x12, 0x2c, 0x94, 0x1b, 0x0c, 0x33, 0xcb, 0x29, 0xe5, 0xc4, 0x23, 0x39, 0xc6, 0x0b, 0x8f, + 0xe4, 0x18, 0x1f, 0x3c, 0x92, 0x63, 0x9c, 0xf0, 0x58, 0x8e, 0xe1, 0xc2, 0x63, 0x39, 0x86, 0x1b, + 0x8f, 0xe5, 0x18, 0xa2, 0xbc, 0xd2, 0x33, 0x4b, 0x32, 0x4a, 0x93, 0xf4, 
0x92, 0xf3, 0x73, 0xf5, + 0x21, 0x36, 0xe9, 0x62, 0x8b, 0x1e, 0x5d, 0x84, 0x5d, 0xba, 0xd0, 0x08, 0xaa, 0x40, 0x8a, 0xa2, + 0x92, 0xca, 0x82, 0xd4, 0xe2, 0x24, 0x36, 0x70, 0x1c, 0x19, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, + 0xc0, 0x63, 0x8b, 0x0b, 0x87, 0x02, 0x00, 0x00, } func (m *RejectedDisableValidator) Marshal() (dAtA []byte, err error) { diff --git a/x/validator/types/tx.pb.go b/x/validator/types/tx.pb.go index f65428632..45e606885 100644 --- a/x/validator/types/tx.pb.go +++ b/x/validator/types/tx.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: validator/tx.proto +// source: zigbeealliance/distributedcomplianceledger/validator/tx.proto package types @@ -8,9 +8,9 @@ import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" types "github.com/cosmos/cosmos-sdk/codec/types" - _ "github.com/gogo/protobuf/gogoproto" - grpc1 "github.com/gogo/protobuf/grpc" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + grpc1 "github.com/cosmos/gogoproto/grpc" + proto "github.com/cosmos/gogoproto/proto" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" status "google.golang.org/grpc/status" @@ -40,7 +40,7 @@ func (m *MsgCreateValidator) Reset() { *m = MsgCreateValidator{} } func (m *MsgCreateValidator) String() string { return proto.CompactTextString(m) } func (*MsgCreateValidator) ProtoMessage() {} func (*MsgCreateValidator) Descriptor() ([]byte, []int) { - return fileDescriptor_3d1df81e3a65fd6a, []int{0} + return fileDescriptor_89ab666760ce926c, []int{0} } func (m *MsgCreateValidator) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -76,7 +76,7 @@ func (m *MsgCreateValidatorResponse) Reset() { *m = MsgCreateValidatorRe func (m *MsgCreateValidatorResponse) String() string { return proto.CompactTextString(m) } func (*MsgCreateValidatorResponse) ProtoMessage() {} func (*MsgCreateValidatorResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_3d1df81e3a65fd6a, []int{1} + return 
fileDescriptor_89ab666760ce926c, []int{1} } func (m *MsgCreateValidatorResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -116,7 +116,7 @@ func (m *MsgProposeDisableValidator) Reset() { *m = MsgProposeDisableVal func (m *MsgProposeDisableValidator) String() string { return proto.CompactTextString(m) } func (*MsgProposeDisableValidator) ProtoMessage() {} func (*MsgProposeDisableValidator) Descriptor() ([]byte, []int) { - return fileDescriptor_3d1df81e3a65fd6a, []int{2} + return fileDescriptor_89ab666760ce926c, []int{2} } func (m *MsgProposeDisableValidator) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -180,7 +180,7 @@ func (m *MsgProposeDisableValidatorResponse) Reset() { *m = MsgProposeDi func (m *MsgProposeDisableValidatorResponse) String() string { return proto.CompactTextString(m) } func (*MsgProposeDisableValidatorResponse) ProtoMessage() {} func (*MsgProposeDisableValidatorResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_3d1df81e3a65fd6a, []int{3} + return fileDescriptor_89ab666760ce926c, []int{3} } func (m *MsgProposeDisableValidatorResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -220,7 +220,7 @@ func (m *MsgApproveDisableValidator) Reset() { *m = MsgApproveDisableVal func (m *MsgApproveDisableValidator) String() string { return proto.CompactTextString(m) } func (*MsgApproveDisableValidator) ProtoMessage() {} func (*MsgApproveDisableValidator) Descriptor() ([]byte, []int) { - return fileDescriptor_3d1df81e3a65fd6a, []int{4} + return fileDescriptor_89ab666760ce926c, []int{4} } func (m *MsgApproveDisableValidator) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -284,7 +284,7 @@ func (m *MsgApproveDisableValidatorResponse) Reset() { *m = MsgApproveDi func (m *MsgApproveDisableValidatorResponse) String() string { return proto.CompactTextString(m) } func (*MsgApproveDisableValidatorResponse) ProtoMessage() {} func (*MsgApproveDisableValidatorResponse) Descriptor() ([]byte, []int) { - return 
fileDescriptor_3d1df81e3a65fd6a, []int{5} + return fileDescriptor_89ab666760ce926c, []int{5} } func (m *MsgApproveDisableValidatorResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -321,7 +321,7 @@ func (m *MsgDisableValidator) Reset() { *m = MsgDisableValidator{} } func (m *MsgDisableValidator) String() string { return proto.CompactTextString(m) } func (*MsgDisableValidator) ProtoMessage() {} func (*MsgDisableValidator) Descriptor() ([]byte, []int) { - return fileDescriptor_3d1df81e3a65fd6a, []int{6} + return fileDescriptor_89ab666760ce926c, []int{6} } func (m *MsgDisableValidator) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -364,7 +364,7 @@ func (m *MsgDisableValidatorResponse) Reset() { *m = MsgDisableValidator func (m *MsgDisableValidatorResponse) String() string { return proto.CompactTextString(m) } func (*MsgDisableValidatorResponse) ProtoMessage() {} func (*MsgDisableValidatorResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_3d1df81e3a65fd6a, []int{7} + return fileDescriptor_89ab666760ce926c, []int{7} } func (m *MsgDisableValidatorResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -401,7 +401,7 @@ func (m *MsgEnableValidator) Reset() { *m = MsgEnableValidator{} } func (m *MsgEnableValidator) String() string { return proto.CompactTextString(m) } func (*MsgEnableValidator) ProtoMessage() {} func (*MsgEnableValidator) Descriptor() ([]byte, []int) { - return fileDescriptor_3d1df81e3a65fd6a, []int{8} + return fileDescriptor_89ab666760ce926c, []int{8} } func (m *MsgEnableValidator) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -444,7 +444,7 @@ func (m *MsgEnableValidatorResponse) Reset() { *m = MsgEnableValidatorRe func (m *MsgEnableValidatorResponse) String() string { return proto.CompactTextString(m) } func (*MsgEnableValidatorResponse) ProtoMessage() {} func (*MsgEnableValidatorResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_3d1df81e3a65fd6a, []int{9} + return 
fileDescriptor_89ab666760ce926c, []int{9} } func (m *MsgEnableValidatorResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -484,7 +484,7 @@ func (m *MsgRejectDisableValidator) Reset() { *m = MsgRejectDisableValid func (m *MsgRejectDisableValidator) String() string { return proto.CompactTextString(m) } func (*MsgRejectDisableValidator) ProtoMessage() {} func (*MsgRejectDisableValidator) Descriptor() ([]byte, []int) { - return fileDescriptor_3d1df81e3a65fd6a, []int{10} + return fileDescriptor_89ab666760ce926c, []int{10} } func (m *MsgRejectDisableValidator) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -548,7 +548,7 @@ func (m *MsgRejectDisableValidatorResponse) Reset() { *m = MsgRejectDisa func (m *MsgRejectDisableValidatorResponse) String() string { return proto.CompactTextString(m) } func (*MsgRejectDisableValidatorResponse) ProtoMessage() {} func (*MsgRejectDisableValidatorResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_3d1df81e3a65fd6a, []int{11} + return fileDescriptor_89ab666760ce926c, []int{11} } func (m *MsgRejectDisableValidatorResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -592,53 +592,55 @@ func init() { proto.RegisterType((*MsgRejectDisableValidatorResponse)(nil), "zigbeealliance.distributedcomplianceledger.validator.MsgRejectDisableValidatorResponse") } -func init() { proto.RegisterFile("validator/tx.proto", fileDescriptor_3d1df81e3a65fd6a) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/validator/tx.proto", fileDescriptor_89ab666760ce926c) +} -var fileDescriptor_3d1df81e3a65fd6a = []byte{ - // 678 bytes of a gzipped FileDescriptorProto +var fileDescriptor_89ab666760ce926c = []byte{ + // 682 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe4, 0x96, 0x4d, 0x6b, 0x13, 0x41, - 0x18, 0xc7, 0x33, 0x6d, 0xa8, 0xed, 0xf4, 0xa0, 0x4c, 0x8b, 0x4d, 0x53, 0xdd, 0xd4, 0xd5, 0x43, - 0x41, 0xb2, 0x2b, 0xb5, 0x08, 0x16, 
0x3c, 0x24, 0x56, 0x7c, 0x23, 0x58, 0x57, 0x50, 0x11, 0xa4, - 0xee, 0xcb, 0x74, 0x1c, 0xd9, 0xec, 0xac, 0x33, 0x93, 0xda, 0xf8, 0x09, 0x3c, 0xfa, 0x11, 0xfa, - 0x05, 0x04, 0x05, 0x3f, 0x81, 0x20, 0x14, 0x4f, 0xc5, 0x93, 0xa7, 0x22, 0xed, 0xc5, 0x8b, 0x17, - 0xfd, 0x02, 0x92, 0x7d, 0xc9, 0x6e, 0x37, 0x5d, 0x85, 0x6d, 0x7a, 0xf2, 0xb6, 0x9b, 0x27, 0xcf, - 0xef, 0xff, 0x3c, 0xff, 0x67, 0xe6, 0x61, 0x21, 0xda, 0x30, 0x5d, 0xea, 0x98, 0x92, 0x71, 0x5d, - 0x6e, 0x6a, 0x3e, 0x67, 0x92, 0xa1, 0xa5, 0xd7, 0x94, 0x58, 0x18, 0x9b, 0xae, 0x4b, 0x4d, 0xcf, - 0xc6, 0x9a, 0x43, 0x85, 0xe4, 0xd4, 0xea, 0x48, 0xec, 0xd8, 0xac, 0xed, 0x87, 0xbf, 0xba, 0xd8, - 0x21, 0x98, 0x6b, 0xfd, 0xf4, 0xea, 0x5c, 0x42, 0x72, 0xb0, 0xb0, 0x39, 0xf5, 0x25, 0x65, 0x5e, - 0x88, 0xac, 0xce, 0x12, 0xc6, 0x88, 0x8b, 0xf5, 0xe0, 0xcd, 0xea, 0xac, 0xeb, 0xa6, 0xd7, 0x8d, - 0x42, 0xd3, 0x84, 0x11, 0x16, 0x3c, 0xea, 0xbd, 0xa7, 0x38, 0xc1, 0x66, 0xa2, 0xcd, 0xc4, 0x5a, - 0x18, 0x08, 0x5f, 0xc2, 0x90, 0xfa, 0x61, 0x04, 0xa2, 0x96, 0x20, 0xd7, 0x39, 0x36, 0x25, 0x7e, - 0x18, 0x8b, 0xa2, 0x9b, 0x70, 0x4c, 0x50, 0xe2, 0x61, 0x5e, 0x01, 0xf3, 0x60, 0x61, 0xa2, 0xa9, - 0xff, 0xda, 0xad, 0x4d, 0x45, 0x35, 0xe1, 0x65, 0x95, 0xe3, 0x97, 0x1d, 0xca, 0xb1, 0xa3, 0x7e, - 0xfd, 0x58, 0x9f, 0x8e, 0x78, 0x0d, 0xc7, 0xe1, 0x58, 0x88, 0x07, 0x92, 0x53, 0x8f, 0x18, 0x51, - 0x3a, 0x7a, 0x0a, 0xc7, 0xfc, 0x8e, 0x75, 0x17, 0x77, 0x2b, 0x23, 0xf3, 0x60, 0x61, 0x72, 0x71, - 0x5a, 0x0b, 0x8b, 0xd7, 0xe2, 0xe2, 0xb5, 0x86, 0xd7, 0xcd, 0xc5, 0x7f, 0x49, 0xf0, 0x36, 0xef, - 0xfa, 0x92, 0x69, 0xab, 0x01, 0xcc, 0x88, 0xa0, 0x88, 0xc2, 0xc9, 0x94, 0x3f, 0x95, 0xd1, 0x40, - 0xa3, 0xa1, 0x15, 0xf1, 0x5c, 0x5b, 0x49, 0x40, 0xcd, 0xf2, 0xf6, 0x6e, 0xad, 0x64, 0xa4, 0xd9, - 0xcb, 0xe3, 0x6f, 0xb6, 0x6a, 0xa5, 0x1f, 0x5b, 0xb5, 0x92, 0x7a, 0x06, 0x56, 0x07, 0x2d, 0x33, - 0xb0, 0xf0, 0x99, 0x27, 0xb0, 0xfa, 0x1b, 0x04, 0xe1, 0x55, 0xce, 0x7c, 0x26, 0xf0, 0x0a, 0x15, - 0xa6, 0xe5, 0xa6, 0x9c, 0xbd, 0x0d, 0x4f, 0xd8, 0xbd, 0x4c, 0x56, 0xd8, 
0xda, 0x38, 0xbf, 0x87, - 0x32, 0xc3, 0x48, 0x60, 0x6e, 0x11, 0x54, 0x94, 0x8f, 0x2e, 0xc2, 0x32, 0xf5, 0xd6, 0x59, 0x60, - 0xe0, 0x44, 0x73, 0xe6, 0x20, 0xa7, 0x6d, 0x6e, 0x5e, 0x5b, 0xba, 0x74, 0xf5, 0x8a, 0x6a, 0x04, - 0x7f, 0x42, 0x08, 0x96, 0x25, 0x6d, 0xe3, 0x4a, 0x79, 0x1e, 0x2c, 0x8c, 0x1a, 0xc1, 0xb3, 0x7a, - 0x01, 0xaa, 0xf9, 0x4d, 0x67, 0xbd, 0x69, 0xf8, 0x3e, 0x67, 0x1b, 0xff, 0x99, 0x37, 0x39, 0x4d, - 0xf7, 0xbd, 0x79, 0x06, 0xa7, 0x5a, 0x82, 0x1c, 0xa3, 0x27, 0xea, 0x59, 0x38, 0x77, 0x88, 0x42, - 0xbf, 0x80, 0xb5, 0x60, 0x13, 0xdc, 0xf0, 0x8e, 0x4d, 0x3f, 0xbc, 0x37, 0x19, 0x81, 0xbe, 0xfc, - 0x2b, 0x38, 0xdb, 0x12, 0xc4, 0xc0, 0x2f, 0xb0, 0x2d, 0x07, 0x5c, 0xa8, 0x64, 0xaa, 0x48, 0x06, - 0x5d, 0xc9, 0x0c, 0x3a, 0x99, 0x1b, 0x4a, 0xcf, 0xed, 0x2f, 0xe3, 0x39, 0x0f, 0xcf, 0xe5, 0x0a, - 0xc7, 0xd5, 0x2d, 0xfe, 0x1c, 0x87, 0xa3, 0x2d, 0x41, 0xd0, 0x3b, 0x00, 0x4f, 0x66, 0x97, 0xe5, - 0xad, 0x62, 0xfb, 0x66, 0x70, 0x87, 0x54, 0x57, 0x87, 0x45, 0x8a, 0xeb, 0x46, 0x9f, 0x01, 0x9c, - 0xc9, 0x5b, 0x45, 0xc5, 0xd5, 0x72, 0x88, 0xd5, 0xc7, 0xc3, 0x26, 0x1e, 0xe8, 0x23, 0x6f, 0x6d, - 0x14, 0xef, 0x23, 0x87, 0x78, 0x84, 0x3e, 0xfe, 0x71, 0xcb, 0xd1, 0x7b, 0x00, 0x4f, 0x0d, 0xde, - 0xf1, 0xc2, 0x72, 0x03, 0x95, 0xdf, 0x1f, 0x1a, 0xaa, 0x5f, 0x72, 0xef, 0xc8, 0x67, 0xb7, 0x42, - 0xf1, 0x23, 0x9f, 0x21, 0x1d, 0xe1, 0xc8, 0xe7, 0x2c, 0x12, 0xf4, 0x09, 0xc0, 0xd3, 0x39, 0x6b, - 0xe4, 0x5e, 0x61, 0xb1, 0xc3, 0x81, 0xd5, 0x47, 0x43, 0x06, 0xc6, 0x4d, 0x34, 0x9d, 0xed, 0x3d, - 0x05, 0xec, 0xec, 0x29, 0xe0, 0xfb, 0x9e, 0x02, 0xde, 0xee, 0x2b, 0xa5, 0x9d, 0x7d, 0xa5, 0xf4, - 0x6d, 0x5f, 0x29, 0x3d, 0xb9, 0x43, 0xa8, 0x7c, 0xde, 0xb1, 0x34, 0x9b, 0xb5, 0xf5, 0x50, 0xbc, - 0x1e, 0xab, 0xeb, 0x29, 0xf5, 0x7a, 0x22, 0x5f, 0x0f, 0xf5, 0xf5, 0x4d, 0x3d, 0xf5, 0x7d, 0xda, - 0xf5, 0xb1, 0xb0, 0xc6, 0x82, 0xaf, 0xb0, 0xcb, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x29, 0x83, - 0x28, 0x2c, 0xb9, 0x0a, 0x00, 0x00, + 0x18, 0xc7, 0x33, 0x6d, 0xa8, 0xed, 0xf4, 0xa0, 0x4c, 0x8b, 0x4d, 0x57, 
0xdd, 0xd4, 0xd5, 0x43, + 0x41, 0xb2, 0x2b, 0xb5, 0x08, 0x16, 0x7a, 0x48, 0xac, 0xef, 0x04, 0xeb, 0x0a, 0x2a, 0x82, 0xd4, + 0x7d, 0x99, 0x8e, 0x23, 0x9b, 0x9d, 0x75, 0x66, 0x52, 0x1b, 0x3f, 0x81, 0x47, 0x3f, 0x42, 0xbf, + 0x80, 0xa0, 0xe0, 0x27, 0x10, 0x84, 0xe2, 0xa9, 0x78, 0xf2, 0x54, 0xa4, 0xbd, 0x78, 0xf1, 0xa2, + 0x5f, 0x40, 0xb2, 0x2f, 0x49, 0xba, 0xc9, 0x56, 0xd8, 0xa6, 0x27, 0x6f, 0xb3, 0x99, 0x3c, 0xbf, + 0xff, 0xf3, 0xfc, 0x9f, 0x99, 0x87, 0x81, 0xcb, 0x6f, 0x28, 0xb1, 0x31, 0xb6, 0x3c, 0x8f, 0x5a, + 0xbe, 0x83, 0x0d, 0x97, 0x0a, 0xc9, 0xa9, 0xdd, 0x94, 0xd8, 0x75, 0x58, 0x23, 0x88, 0x7e, 0xf5, + 0xb0, 0x4b, 0x30, 0x37, 0x36, 0x2c, 0x8f, 0xba, 0x96, 0x64, 0xdc, 0x90, 0x9b, 0x7a, 0xc0, 0x99, + 0x64, 0x68, 0xf1, 0x60, 0xb8, 0x7e, 0x48, 0xb8, 0xde, 0x09, 0x57, 0x6e, 0xe6, 0x12, 0x75, 0xb1, + 0x70, 0x38, 0x0d, 0x24, 0x65, 0x7e, 0xa4, 0xae, 0xcc, 0x12, 0xc6, 0x88, 0x87, 0x8d, 0xf0, 0xcb, + 0x6e, 0xae, 0x1b, 0x96, 0xdf, 0x8a, 0xb7, 0xa6, 0x09, 0x23, 0x2c, 0x5c, 0x1a, 0xed, 0x55, 0x12, + 0xe0, 0x30, 0xd1, 0x60, 0x62, 0x2d, 0xda, 0x88, 0x3e, 0xa2, 0x2d, 0xed, 0xe3, 0x08, 0x44, 0x75, + 0x41, 0xae, 0x73, 0x6c, 0x49, 0xfc, 0x28, 0x11, 0x45, 0xb7, 0xe0, 0x98, 0xa0, 0xc4, 0xc7, 0xbc, + 0x04, 0xe6, 0xc0, 0xfc, 0x44, 0xcd, 0xf8, 0xbd, 0x5b, 0x9e, 0x8a, 0x73, 0xc2, 0x4b, 0x1a, 0xc7, + 0xaf, 0x9a, 0x94, 0x63, 0x57, 0xfb, 0xf6, 0xa9, 0x32, 0x1d, 0xf3, 0xaa, 0xae, 0xcb, 0xb1, 0x10, + 0x0f, 0x25, 0xa7, 0x3e, 0x31, 0xe3, 0x70, 0xf4, 0x0c, 0x8e, 0x05, 0x4d, 0xfb, 0x1e, 0x6e, 0x95, + 0x46, 0xe6, 0xc0, 0xfc, 0xe4, 0xc2, 0xb4, 0x1e, 0x25, 0xaf, 0x27, 0xc9, 0xeb, 0x55, 0xbf, 0x95, + 0x89, 0xff, 0xda, 0xc5, 0x3b, 0xbc, 0x15, 0x48, 0xa6, 0xaf, 0x86, 0x30, 0x33, 0x86, 0x22, 0x0a, + 0x27, 0x7b, 0xfc, 0x29, 0x8d, 0x86, 0x1a, 0x55, 0x3d, 0x4f, 0x7b, 0xf4, 0x95, 0x2e, 0xa8, 0x56, + 0xdc, 0xde, 0x2d, 0x17, 0xcc, 0x5e, 0xf6, 0xd2, 0xf8, 0xdb, 0xad, 0x72, 0xe1, 0xe7, 0x56, 0xb9, + 0xa0, 0x9d, 0x85, 0x4a, 0xbf, 0x65, 0x26, 0x16, 0x01, 0xf3, 0x05, 0xd6, 0xfe, 0x80, 0x70, 0x7b, + 0x95, 0xb3, 
0x80, 0x09, 0xbc, 0x42, 0x85, 0x65, 0x7b, 0x3d, 0xce, 0xde, 0x81, 0x27, 0x9c, 0x76, + 0x24, 0xcb, 0x6d, 0x6d, 0x12, 0xdf, 0x46, 0x59, 0xd1, 0x4e, 0x68, 0x6e, 0x1e, 0x54, 0x1c, 0x8f, + 0x2e, 0xc1, 0x22, 0xf5, 0xd7, 0x59, 0x68, 0xe0, 0x44, 0x6d, 0xe6, 0x20, 0xa7, 0x61, 0x6d, 0x2e, + 0x2f, 0x5e, 0xbe, 0x76, 0x55, 0x33, 0xc3, 0x3f, 0x21, 0x04, 0x8b, 0x92, 0x36, 0x70, 0xa9, 0x38, + 0x07, 0xe6, 0x47, 0xcd, 0x70, 0xad, 0x5d, 0x84, 0x5a, 0x76, 0xd1, 0x69, 0x6f, 0xaa, 0x41, 0xc0, + 0xd9, 0xc6, 0x7f, 0xe6, 0x4d, 0x46, 0xd1, 0x1d, 0x6f, 0x9e, 0xc3, 0xa9, 0xba, 0x20, 0xc7, 0xe8, + 0x89, 0x76, 0x0e, 0x9e, 0x19, 0xa0, 0xd0, 0x49, 0x60, 0x2d, 0x9c, 0x04, 0x37, 0xfc, 0x63, 0xd3, + 0x8f, 0xee, 0x4d, 0x4a, 0xa0, 0x23, 0xff, 0x1a, 0xce, 0xd6, 0x05, 0x31, 0xf1, 0x4b, 0xec, 0xc8, + 0x3e, 0x17, 0x4a, 0xa9, 0x2c, 0xba, 0x8d, 0x2e, 0xa5, 0x1a, 0xdd, 0xed, 0x1b, 0xea, 0xed, 0xdb, + 0x21, 0xed, 0xb9, 0x00, 0xcf, 0x67, 0x0a, 0x27, 0xd9, 0x2d, 0xfc, 0x1a, 0x87, 0xa3, 0x75, 0x41, + 0xd0, 0x7b, 0x00, 0x4f, 0xa6, 0x87, 0xe5, 0xed, 0x7c, 0xf3, 0xa6, 0x7f, 0x86, 0x28, 0xab, 0xc3, + 0x22, 0x25, 0x79, 0xa3, 0x2f, 0x00, 0xce, 0x64, 0x8d, 0xa2, 0xfc, 0x6a, 0x19, 0x44, 0xe5, 0xc9, + 0xb0, 0x89, 0x07, 0xea, 0xc8, 0x1a, 0x1b, 0xf9, 0xeb, 0xc8, 0x20, 0x1e, 0xa1, 0x8e, 0x7f, 0xdc, + 0x72, 0xf4, 0x01, 0xc0, 0x53, 0xfd, 0x77, 0x3c, 0xb7, 0x5c, 0x5f, 0xe6, 0x0f, 0x86, 0x86, 0xea, + 0xa4, 0xdc, 0x3e, 0xf2, 0xe9, 0xa9, 0x90, 0xff, 0xc8, 0xa7, 0x48, 0x47, 0x38, 0xf2, 0x19, 0x83, + 0x04, 0x7d, 0x06, 0xf0, 0x74, 0xc6, 0x18, 0xb9, 0x9f, 0x5b, 0x6c, 0x30, 0x50, 0x79, 0x3c, 0x64, + 0x60, 0x52, 0x44, 0xcd, 0xdd, 0xde, 0x53, 0xc1, 0xce, 0x9e, 0x0a, 0x7e, 0xec, 0xa9, 0xe0, 0xdd, + 0xbe, 0x5a, 0xd8, 0xd9, 0x57, 0x0b, 0xdf, 0xf7, 0xd5, 0xc2, 0xd3, 0xbb, 0x84, 0xca, 0x17, 0x4d, + 0x5b, 0x77, 0x58, 0xc3, 0x88, 0xc4, 0x2b, 0x83, 0x5e, 0x94, 0x95, 0xae, 0x7c, 0x25, 0x7e, 0x53, + 0x6e, 0xf6, 0x3e, 0x65, 0x5b, 0x01, 0x16, 0xf6, 0x58, 0xf8, 0x0a, 0xbb, 0xf2, 0x37, 0x00, 0x00, + 0xff, 0xff, 0xe8, 0xf9, 0x97, 0xcd, 0x0f, 0x0b, 
0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -898,7 +900,7 @@ var _Msg_serviceDesc = grpc.ServiceDesc{ }, }, Streams: []grpc.StreamDesc{}, - Metadata: "validator/tx.proto", + Metadata: "zigbeealliance/distributedcomplianceledger/validator/tx.proto", } func (m *MsgCreateValidator) Marshal() (dAtA []byte, err error) { diff --git a/x/validator/types/validator.go b/x/validator/types/validator.go index 7e5af1241..2618e02ab 100644 --- a/x/validator/types/validator.go +++ b/x/validator/types/validator.go @@ -1,14 +1,15 @@ package types import ( + "cosmossdk.io/errors" + abci "github.com/cometbft/cometbft/abci/types" + tmprotocrypto "github.com/cometbft/cometbft/proto/tendermint/crypto" "github.com/cosmos/cosmos-sdk/codec" codectypes "github.com/cosmos/cosmos-sdk/codec/types" cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec" cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" - abci "github.com/tendermint/tendermint/abci/types" - tmprotocrypto "github.com/tendermint/tendermint/proto/tendermint/crypto" "sigs.k8s.io/yaml" ) @@ -45,7 +46,7 @@ func NewValidator(owner sdk.ValAddress, pubKey cryptotypes.PubKey, description D func (v Validator) GetConsAddress() (sdk.ConsAddress, error) { pk, ok := v.PubKey.GetCachedValue().(cryptotypes.PubKey) if !ok { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidType, "expecting cryptotypes.PubKey, got %T", pk) + return nil, errors.Wrapf(sdkerrors.ErrInvalidType, "expecting cryptotypes.PubKey, got %T", pk) } return sdk.ConsAddress(pk.Address()), nil @@ -54,7 +55,7 @@ func (v Validator) GetConsAddress() (sdk.ConsAddress, error) { func (v Validator) GetConsPubKey() (cryptotypes.PubKey, error) { pk, ok := v.PubKey.GetCachedValue().(cryptotypes.PubKey) if !ok { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidType, "expecting cryptotypes.PubKey, got %T", pk) + return nil, 
errors.Wrapf(sdkerrors.ErrInvalidType, "expecting cryptotypes.PubKey, got %T", pk) } return pk, nil @@ -159,7 +160,7 @@ func (v Validator) TmConsPublicKey() (tmprotocrypto.PublicKey, error) { func (v Validator) GetConsAddr() (sdk.ConsAddress, error) { pk, ok := v.PubKey.GetCachedValue().(cryptotypes.PubKey) if !ok { - return nil, sdkerrors.Wrapf(sdkerrors.ErrInvalidType, "expecting cryptotypes.PubKey, got %T", pk) + return nil, errors.Wrapf(sdkerrors.ErrInvalidType, "expecting cryptotypes.PubKey, got %T", pk) } return sdk.ConsAddress(pk.Address()), nil @@ -194,31 +195,31 @@ const ( // Ensure the length of a validator's description. func (d Description) Validate() error { if len(d.Moniker) == 0 { - return sdkerrors.Wrap(sdkerrors.ErrUnknownRequest, "Invalid Description Moniker: it cannot be empty") + return errors.Wrap(sdkerrors.ErrUnknownRequest, "Invalid Description Moniker: it cannot be empty") } if len(d.Moniker) > MaxNameLength { - return sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, + return errors.Wrapf(sdkerrors.ErrUnknownRequest, "Invalid Description Moniker: received string of length %v, max is %v", len(d.Moniker), MaxNameLength, ) } if len(d.Identity) > MaxIdentityLength { - return sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, + return errors.Wrapf(sdkerrors.ErrUnknownRequest, "Invalid Description Identity: "+ "received string of length %v, max is %v", len(d.Identity), MaxIdentityLength, ) } if len(d.Website) > MaxWebsiteLength { - return sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, + return errors.Wrapf(sdkerrors.ErrUnknownRequest, "Invalid Description Website: "+ "received string of length %v, max is %v", len(d.Website), MaxWebsiteLength, ) } if len(d.Details) > MaxDetailsLength { - return sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, + return errors.Wrapf(sdkerrors.ErrUnknownRequest, "Invalid Description Details: received string of length %v, max is %v", len(d.Details), MaxDetailsLength, ) diff --git a/x/validator/types/validator.pb.go 
b/x/validator/types/validator.pb.go index d9c7df1b5..009048f5c 100644 --- a/x/validator/types/validator.pb.go +++ b/x/validator/types/validator.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: validator/validator.proto +// source: zigbeealliance/distributedcomplianceledger/validator/validator.proto package types @@ -7,8 +7,8 @@ import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" types "github.com/cosmos/cosmos-sdk/codec/types" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -43,7 +43,7 @@ type Validator struct { func (m *Validator) Reset() { *m = Validator{} } func (*Validator) ProtoMessage() {} func (*Validator) Descriptor() ([]byte, []int) { - return fileDescriptor_e972accd7c1f0747, []int{0} + return fileDescriptor_dac29b5a8b0a342e, []int{0} } func (m *Validator) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -76,35 +76,37 @@ func init() { proto.RegisterType((*Validator)(nil), "zigbeealliance.distributedcomplianceledger.validator.Validator") } -func init() { proto.RegisterFile("validator/validator.proto", fileDescriptor_e972accd7c1f0747) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/validator/validator.proto", fileDescriptor_dac29b5a8b0a342e) +} -var fileDescriptor_e972accd7c1f0747 = []byte{ - // 394 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x92, 0xb1, 0x8e, 0xd4, 0x30, - 0x10, 0x86, 0xe3, 0x83, 0x44, 0x9c, 0xef, 0xaa, 0x28, 0x42, 0xde, 0x43, 0xca, 0x46, 0x57, 0xa5, - 0x89, 0x2d, 0x01, 0x15, 0xdd, 0xae, 0x10, 0x05, 0x34, 0x28, 0x48, 0x14, 0x34, 0x28, 0x89, 0x8d, - 0x31, 0xca, 0x66, 0x22, 0xdb, 0xe1, 0x08, 0x4f, 0x40, 0x49, 0x49, 0xb9, 0x0f, 0xc1, 0x43, 0x9c, - 0xa8, 0x56, 0x54, 0x94, 0x68, 0xb7, 0xe1, 0x31, 0xd0, 0xc6, 0xd9, 0xcd, 0xd2, 0xd2, 0xfd, 
0x33, - 0xbf, 0xe7, 0xd7, 0xcc, 0x27, 0xe3, 0xd9, 0xc7, 0xa2, 0x56, 0xbc, 0xb0, 0xa0, 0xd9, 0x51, 0xd1, - 0x56, 0x83, 0x85, 0xf0, 0xf1, 0x67, 0x25, 0x4b, 0x21, 0x8a, 0xba, 0x56, 0x45, 0x53, 0x09, 0xca, - 0x95, 0xb1, 0x5a, 0x95, 0x9d, 0x15, 0xbc, 0x82, 0x55, 0xeb, 0xba, 0xb5, 0xe0, 0x52, 0x68, 0x7a, - 0x9c, 0xbd, 0x9a, 0x49, 0x00, 0x59, 0x0b, 0x36, 0x64, 0x94, 0xdd, 0x3b, 0x56, 0x34, 0xbd, 0x0b, - 0xbc, 0x8a, 0x24, 0x48, 0x18, 0x24, 0xdb, 0xab, 0xb1, 0x3b, 0xab, 0xc0, 0xac, 0xc0, 0xbc, 0x75, - 0x86, 0x2b, 0x46, 0xeb, 0xc1, 0xb4, 0x1c, 0x17, 0xa6, 0xd2, 0xaa, 0xb5, 0x0a, 0x1a, 0x67, 0x5e, - 0xdf, 0x9e, 0xe1, 0xf3, 0xd7, 0x07, 0x3f, 0xa4, 0xd8, 0x87, 0x9b, 0x46, 0x68, 0x82, 0x12, 0x94, - 0x9e, 0x2f, 0xc9, 0xcf, 0xef, 0x59, 0x34, 0x66, 0x2d, 0x38, 0xd7, 0xc2, 0x98, 0x57, 0x56, 0xab, - 0x46, 0xe6, 0xee, 0x59, 0x58, 0xe1, 0x8b, 0x93, 0x48, 0x72, 0x96, 0xa0, 0xf4, 0xe2, 0xe1, 0x82, - 0xfe, 0xcf, 0xc9, 0xf4, 0xe9, 0x14, 0x94, 0x9f, 0xa6, 0x86, 0xcf, 0x70, 0xd0, 0x76, 0xe5, 0x0b, - 0xd1, 0x93, 0x3b, 0x43, 0x7e, 0x44, 0x1d, 0x1c, 0x7a, 0x80, 0x43, 0x17, 0x4d, 0xbf, 0x24, 0x3f, - 0xa6, 0x5d, 0x2b, 0xdd, 0xb7, 0x16, 0xe8, 0xcb, 0x61, 0x2a, 0x1f, 0xa7, 0xc3, 0x08, 0xfb, 0x2d, - 0xdc, 0x08, 0x4d, 0xee, 0x26, 0x28, 0xf5, 0x73, 0x57, 0x84, 0xf7, 0x71, 0xf0, 0xa1, 0x50, 0xb5, - 0xe0, 0xc4, 0x4f, 0x50, 0x7a, 0x2f, 0x1f, 0xab, 0xf0, 0x1a, 0x5f, 0x3a, 0x95, 0x8b, 0xc2, 0x40, - 0x43, 0x82, 0x3d, 0x91, 0xfc, 0x9f, 0xde, 0x93, 0xcb, 0x2f, 0xeb, 0xb9, 0xf7, 0x6d, 0x3d, 0xf7, - 0xfe, 0xac, 0xe7, 0xde, 0x92, 0xdf, 0x6e, 0x63, 0xb4, 0xd9, 0xc6, 0xe8, 0xf7, 0x36, 0x46, 0x5f, - 0x77, 0xb1, 0xb7, 0xd9, 0xc5, 0xde, 0xaf, 0x5d, 0xec, 0xbd, 0x79, 0x2e, 0x95, 0x7d, 0xdf, 0x95, - 0xb4, 0x82, 0x15, 0x73, 0x6c, 0xb2, 0x03, 0x1c, 0x76, 0x02, 0x27, 0x9b, 0xe8, 0x64, 0x0e, 0x0f, - 0xfb, 0x34, 0xfd, 0x27, 0x66, 0xfb, 0x56, 0x98, 0x32, 0x18, 0xae, 0x7e, 0xf4, 0x37, 0x00, 0x00, - 0xff, 0xff, 0x58, 0x0e, 0xb0, 0x72, 0x73, 0x02, 0x00, 0x00, +var fileDescriptor_dac29b5a8b0a342e = []byte{ + // 398 bytes of a 
gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x91, 0xb1, 0x8e, 0xd3, 0x40, + 0x10, 0x86, 0xbd, 0x07, 0xb6, 0xb8, 0xbd, 0xab, 0x2c, 0x0b, 0xed, 0x5d, 0xe1, 0x58, 0x57, 0xb9, + 0xf1, 0x5a, 0x02, 0x2a, 0xba, 0x44, 0xa7, 0x2b, 0xa0, 0x41, 0x46, 0xa2, 0xa0, 0x41, 0xb6, 0x77, + 0x59, 0x16, 0x39, 0x1e, 0x6b, 0x77, 0x4d, 0x30, 0x4f, 0x40, 0x49, 0x49, 0x99, 0x87, 0xe0, 0x21, + 0x22, 0xaa, 0x88, 0x8a, 0x12, 0x25, 0x0d, 0x8f, 0x81, 0xe2, 0x75, 0x88, 0x23, 0x21, 0x8a, 0x74, + 0xff, 0xcc, 0x78, 0x3e, 0xef, 0xfc, 0x3f, 0xbe, 0xfd, 0x24, 0x45, 0xc1, 0x79, 0x5e, 0x55, 0x32, + 0xaf, 0x4b, 0x9e, 0x32, 0xa9, 0x8d, 0x92, 0x45, 0x6b, 0x38, 0x2b, 0x61, 0xde, 0xd8, 0x6e, 0xc5, + 0x99, 0xe0, 0x2a, 0xfd, 0x90, 0x57, 0x92, 0xe5, 0x06, 0x46, 0x8a, 0x36, 0x0a, 0x0c, 0xf8, 0x4f, + 0x8e, 0x29, 0xf4, 0x3f, 0x14, 0xfa, 0x77, 0xf7, 0xfa, 0x4a, 0x00, 0x88, 0x8a, 0xa7, 0x3d, 0xa3, + 0x68, 0xdf, 0xa6, 0x79, 0xdd, 0x59, 0xe0, 0x75, 0x20, 0x40, 0x40, 0x2f, 0xd3, 0x9d, 0x1a, 0xba, + 0x57, 0x25, 0xe8, 0x39, 0xe8, 0x37, 0x76, 0x60, 0x8b, 0x61, 0x74, 0x77, 0xd2, 0x1d, 0x8c, 0xeb, + 0x52, 0xc9, 0xc6, 0x48, 0xa8, 0x2d, 0xe7, 0x66, 0x75, 0x86, 0xcf, 0x5f, 0xed, 0xe7, 0x3e, 0xc5, + 0x2e, 0x2c, 0x6a, 0xae, 0x08, 0x8a, 0x50, 0x7c, 0x3e, 0x23, 0x3f, 0xbe, 0x25, 0xc1, 0xf0, 0xdb, + 0x29, 0x63, 0x8a, 0x6b, 0xfd, 0xd2, 0x28, 0x59, 0x8b, 0xcc, 0x7e, 0xe6, 0x97, 0xf8, 0x62, 0x84, + 0x24, 0x67, 0x11, 0x8a, 0x2f, 0x1e, 0x4d, 0xe9, 0x29, 0xee, 0xd0, 0xdb, 0x03, 0x28, 0x1b, 0x53, + 0xfd, 0x3b, 0xec, 0x35, 0x6d, 0xf1, 0x9c, 0x77, 0xe4, 0x5e, 0xcf, 0x0f, 0xa8, 0xf5, 0x91, 0xee, + 0x7d, 0xa4, 0xd3, 0xba, 0x9b, 0x91, 0xef, 0x87, 0xb7, 0x96, 0xaa, 0x6b, 0x0c, 0xd0, 0x17, 0xfd, + 0x56, 0x36, 0x6c, 0xfb, 0x01, 0x76, 0x1b, 0x58, 0x70, 0x45, 0xee, 0x47, 0x28, 0x76, 0x33, 0x5b, + 0xf8, 0x0f, 0xb1, 0xf7, 0x3e, 0x97, 0x15, 0x67, 0xc4, 0x8d, 0x50, 0xfc, 0x20, 0x1b, 0x2a, 0xff, + 0x06, 0x5f, 0x5a, 0x95, 0xf1, 0x5c, 0x43, 0x4d, 0xbc, 0x9d, 0x23, 0xd9, 0x51, 0xef, 0xe9, 0xe5, + 0xe7, 
0xe5, 0xc4, 0xf9, 0xba, 0x9c, 0x38, 0xbf, 0x97, 0x13, 0x67, 0xc6, 0x56, 0x9b, 0x10, 0xad, + 0x37, 0x21, 0xfa, 0xb5, 0x09, 0xd1, 0x97, 0x6d, 0xe8, 0xac, 0xb7, 0xa1, 0xf3, 0x73, 0x1b, 0x3a, + 0xaf, 0x9f, 0x09, 0x69, 0xde, 0xb5, 0x05, 0x2d, 0x61, 0x9e, 0x5a, 0x6f, 0x92, 0x7f, 0x05, 0x97, + 0x1c, 0xdc, 0x49, 0x86, 0xe8, 0x3e, 0x8e, 0xc2, 0x33, 0x5d, 0xc3, 0x75, 0xe1, 0xf5, 0x57, 0x3f, + 0xfe, 0x13, 0x00, 0x00, 0xff, 0xff, 0x99, 0xb1, 0x7f, 0xad, 0xc9, 0x02, 0x00, 0x00, } func (m *Validator) Marshal() (dAtA []byte, err error) { diff --git a/x/vendorinfo/client/cli/query.go b/x/vendorinfo/client/cli/query.go index 61dcd0b0d..6d0a1e57c 100644 --- a/x/vendorinfo/client/cli/query.go +++ b/x/vendorinfo/client/cli/query.go @@ -13,7 +13,7 @@ import ( ) // GetQueryCmd returns the cli query commands for this module. -func GetQueryCmd(queryRoute string) *cobra.Command { +func GetQueryCmd() *cobra.Command { // Group vendorinfo queries under a subcommand cmd := &cobra.Command{ Use: types.ModuleName, diff --git a/x/vendorinfo/client/cli/query_vendor_info.go b/x/vendorinfo/client/cli/query_vendor_info.go index 6da8acd2d..a5f8d1d39 100644 --- a/x/vendorinfo/client/cli/query_vendor_info.go +++ b/x/vendorinfo/client/cli/query_vendor_info.go @@ -15,7 +15,10 @@ func CmdListVendorInfo() *cobra.Command { Use: "all-vendors", Short: "Get information about all vendors", RunE: func(cmd *cobra.Command, args []string) error { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } pageReq, err := client.ReadPageRequest(cmd.Flags()) if err != nil { @@ -54,7 +57,10 @@ func CmdShowVendorInfo() *cobra.Command { Short: "Get vendor details for the given vendorID", Args: cobra.ExactArgs(0), RunE: func(cmd *cobra.Command, args []string) (err error) { - clientCtx := client.GetClientContextFromCmd(cmd) + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } var res types.VendorInfo return 
cli.QueryWithProof( diff --git a/x/vendorinfo/client/cli/query_vendor_info_test.go b/x/vendorinfo/client/cli/query_vendor_info_test.go index 22b30c4c6..ec3634895 100644 --- a/x/vendorinfo/client/cli/query_vendor_info_test.go +++ b/x/vendorinfo/client/cli/query_vendor_info_test.go @@ -9,7 +9,7 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" "github.com/stretchr/testify/require" - tmcli "github.com/tendermint/tendermint/libs/cli" + tmcli "github.com/cometbft/cometbft/libs/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/network" "github.com/zigbee-alliance/distributed-compliance-ledger/x/vendorinfo/client/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/x/vendorinfo/types" diff --git a/x/vendorinfo/handler.go b/x/vendorinfo/handler.go index 272b9d7e2..47ab766c9 100644 --- a/x/vendorinfo/handler.go +++ b/x/vendorinfo/handler.go @@ -3,6 +3,7 @@ package vendorinfo import ( "fmt" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/x/vendorinfo/keeper" @@ -32,7 +33,7 @@ func NewHandler(k keeper.Keeper) sdk.Handler { default: errMsg := fmt.Sprintf("unrecognized %s message type: %T", types.ModuleName, msg) - return nil, sdkerrors.Wrap(sdkerrors.ErrUnknownRequest, errMsg) + return nil, errors.Wrap(sdkerrors.ErrUnknownRequest, errMsg) } } } diff --git a/x/vendorinfo/keeper/check_vendor_rights.go b/x/vendorinfo/keeper/check_vendor_rights.go index d24c1ca8e..b2d9dc0f8 100644 --- a/x/vendorinfo/keeper/check_vendor_rights.go +++ b/x/vendorinfo/keeper/check_vendor_rights.go @@ -3,24 +3,26 @@ package keeper import ( "fmt" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + dclauthtypes "github.com/zigbee-alliance/distributed-compliance-ledger/x/dclauth/types" ) -func checkAddVendorRights(ctx 
sdk.Context, k Keeper, signer sdk.AccAddress, vid int32) error { +func checkAddVendorRights(ctx sdk.Context, k Keeper, signer sdk.AccAddress, vid int32) error { //nolint:goconst // sender must have Vendor role and VendorID or VendorAdmin role to add a new vendorinfo isVendor := k.dclauthKeeper.HasRole(ctx, signer, dclauthtypes.Vendor) isVendorAdmin := k.dclauthKeeper.HasRole(ctx, signer, dclauthtypes.VendorAdmin) if !isVendor && !isVendorAdmin { - return sdkerrors.Wrap(sdkerrors.ErrUnauthorized, fmt.Sprintf("MsgAddVendorInfo transaction should be "+ + return errors.Wrap(sdkerrors.ErrUnauthorized, fmt.Sprintf("MsgAddVendorInfo transaction should be "+ "signed by an account with the %s or %s roles", dclauthtypes.Vendor, dclauthtypes.VendorAdmin)) } - // Checks if the the msg creator is the same as the current owner + // Checks if the msg creator is the same as the current owner if isVendor && !k.dclauthKeeper.HasVendorID(ctx, signer, vid) { - return sdkerrors.Wrap(sdkerrors.ErrUnauthorized, fmt.Sprintf("MsgAddVendorInfo transaction should be "+ + return errors.Wrap(sdkerrors.ErrUnauthorized, fmt.Sprintf("MsgAddVendorInfo transaction should be "+ "signed by a vendor account associated with the vendorID %v ", vid)) } @@ -33,13 +35,13 @@ func checkUpdateVendorRights(ctx sdk.Context, k Keeper, signer sdk.AccAddress, v isVendorAdmin := k.dclauthKeeper.HasRole(ctx, signer, dclauthtypes.VendorAdmin) if !isVendor && !isVendorAdmin { - return sdkerrors.Wrap(sdkerrors.ErrUnauthorized, fmt.Sprintf("MsgAddVendorInfo transaction should be "+ + return errors.Wrap(sdkerrors.ErrUnauthorized, fmt.Sprintf("MsgAddVendorInfo transaction should be "+ "signed by an account with the %s or %s roles", dclauthtypes.Vendor, dclauthtypes.VendorAdmin)) } - // Checks if the the msg creator is the same as the current owner + // Checks if the msg creator is the same as the current owner if isVendor && !k.dclauthKeeper.HasVendorID(ctx, signer, vid) { - return 
sdkerrors.Wrap(sdkerrors.ErrUnauthorized, fmt.Sprintf("MsgAddVendorInfo transaction should be "+ + return errors.Wrap(sdkerrors.ErrUnauthorized, fmt.Sprintf("MsgAddVendorInfo transaction should be "+ "signed by a vendor account associated with the vendorID %v ", vid)) } diff --git a/x/vendorinfo/keeper/keeper.go b/x/vendorinfo/keeper/keeper.go index 5be62eaea..347fc9156 100644 --- a/x/vendorinfo/keeper/keeper.go +++ b/x/vendorinfo/keeper/keeper.go @@ -3,17 +3,18 @@ package keeper import ( "fmt" + "github.com/cometbft/cometbft/libs/log" "github.com/cosmos/cosmos-sdk/codec" + storetypes "github.com/cosmos/cosmos-sdk/store/types" sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/tendermint/tendermint/libs/log" "github.com/zigbee-alliance/distributed-compliance-ledger/x/vendorinfo/types" ) type ( Keeper struct { cdc codec.BinaryCodec - storeKey sdk.StoreKey - memKey sdk.StoreKey + storeKey storetypes.StoreKey + memKey storetypes.StoreKey dclauthKeeper types.DclauthKeeper } @@ -22,7 +23,7 @@ type ( func NewKeeper( cdc codec.BinaryCodec, storeKey, - memKey sdk.StoreKey, + memKey storetypes.StoreKey, dclauthKeeper types.DclauthKeeper, ) *Keeper { diff --git a/x/vendorinfo/keeper/msg_server_vendor_info.go b/x/vendorinfo/keeper/msg_server_vendor_info.go index 57186deb4..78c4d519f 100644 --- a/x/vendorinfo/keeper/msg_server_vendor_info.go +++ b/x/vendorinfo/keeper/msg_server_vendor_info.go @@ -3,8 +3,10 @@ package keeper import ( "context" + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "github.com/zigbee-alliance/distributed-compliance-ledger/x/vendorinfo/types" ) @@ -23,7 +25,7 @@ func (k msgServer) CreateVendorInfo(goCtx context.Context, msg *types.MsgCreateV ) if isFound { - return nil, sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "index already set") + return nil, errors.Wrap(sdkerrors.ErrInvalidRequest, "index already set") } vendorInfo := types.VendorInfo{ @@ -59,7 +61,7 @@ func (k msgServer) 
UpdateVendorInfo(goCtx context.Context, msg *types.MsgUpdateV ) if !isFound { - return nil, sdkerrors.Wrap(sdkerrors.ErrKeyNotFound, "index not set") + return nil, errors.Wrap(sdkerrors.ErrKeyNotFound, "index not set") } // check if creator has enough rights to update vendorinfo @@ -109,7 +111,7 @@ func (k msgServer) UpdateVendorInfo(goCtx context.Context, msg *types.MsgUpdateV // return nil, sdkerrors.Wrap(sdkerrors.ErrKeyNotFound, "index not set") // } -// // Checks if the the msg creator is the same as the current owner +// // Checks if the msg creator is the same as the current owner // if msg.Creator != valFound.Creator { // return nil, sdkerrors.Wrap(sdkerrors.ErrUnauthorized, "incorrect owner") // } diff --git a/x/vendorinfo/module.go b/x/vendorinfo/module.go index c2dd0086b..92c5e5651 100644 --- a/x/vendorinfo/module.go +++ b/x/vendorinfo/module.go @@ -5,6 +5,7 @@ import ( "encoding/json" "fmt" + abci "github.com/cometbft/cometbft/abci/types" "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/codec" cdctypes "github.com/cosmos/cosmos-sdk/codec/types" @@ -13,7 +14,6 @@ import ( "github.com/gorilla/mux" "github.com/grpc-ecosystem/grpc-gateway/runtime" "github.com/spf13/cobra" - abci "github.com/tendermint/tendermint/abci/types" "github.com/zigbee-alliance/distributed-compliance-ledger/x/vendorinfo/client/cli" "github.com/zigbee-alliance/distributed-compliance-ledger/x/vendorinfo/keeper" "github.com/zigbee-alliance/distributed-compliance-ledger/x/vendorinfo/types" @@ -61,7 +61,7 @@ func (AppModuleBasic) DefaultGenesis(cdc codec.JSONCodec) json.RawMessage { } // ValidateGenesis performs genesis state validation for the vendorinfo module. 
-func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, config client.TxEncodingConfig, bz json.RawMessage) error { +func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, _ client.TxEncodingConfig, bz json.RawMessage) error { var genState types.GenesisState if err := cdc.UnmarshalJSON(bz, &genState); err != nil { return fmt.Errorf("failed to unmarshal %s genesis state: %w", types.ModuleName, err) @@ -71,8 +71,7 @@ func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, config client.TxEncod } // RegisterRESTRoutes registers the vendorinfo module's REST service handlers. -func (AppModuleBasic) RegisterRESTRoutes(clientCtx client.Context, rtr *mux.Router) { -} +func (AppModuleBasic) RegisterRESTRoutes(_ client.Context, _ *mux.Router) {} // RegisterGRPCGatewayRoutes registers the gRPC Gateway routes for the module. func (AppModuleBasic) RegisterGRPCGatewayRoutes(clientCtx client.Context, mux *runtime.ServeMux) { @@ -86,7 +85,7 @@ func (a AppModuleBasic) GetTxCmd() *cobra.Command { // GetQueryCmd returns the vendorinfo module's root query command. func (AppModuleBasic) GetQueryCmd() *cobra.Command { - return cli.GetQueryCmd(types.StoreKey) + return cli.GetQueryCmd() } // ---------------------------------------------------------------------------- @@ -112,19 +111,6 @@ func (am AppModule) Name() string { return am.AppModuleBasic.Name() } -// Route returns the vendorinfo module's message routing key. -func (am AppModule) Route() sdk.Route { - return sdk.NewRoute(types.RouterKey, NewHandler(am.keeper)) -} - -// QuerierRoute returns the vendorinfo module's query routing key. -func (AppModule) QuerierRoute() string { return types.QuerierRoute } - -// LegacyQuerierHandler returns the vendorinfo module's Querier. -func (am AppModule) LegacyQuerierHandler(legacyQuerierCdc *codec.LegacyAmino) sdk.Querier { - return nil -} - // RegisterServices registers a GRPC query service to respond to the // module-specific GRPC queries. 
func (am AppModule) RegisterServices(cfg module.Configurator) { diff --git a/x/vendorinfo/types/codec.go b/x/vendorinfo/types/codec.go index bc39ae8ca..10f806ea6 100644 --- a/x/vendorinfo/types/codec.go +++ b/x/vendorinfo/types/codec.go @@ -22,7 +22,7 @@ func RegisterInterfaces(registry cdctypes.InterfaceRegistry) { ) // this line is used by starport scaffolding # 3 - msgservice.RegisterMsgServiceDesc(registry, &_Msg_serviceDesc) + msgservice.RegisterMsgServiceDesc(registry, &_Msg_serviceDesc) //nolint:nosnakecase } var ModuleCdc = codec.NewProtoCodec(cdctypes.NewInterfaceRegistry()) diff --git a/x/vendorinfo/types/errors.go b/x/vendorinfo/types/errors.go index bbecb0371..9e9636f80 100644 --- a/x/vendorinfo/types/errors.go +++ b/x/vendorinfo/types/errors.go @@ -3,14 +3,14 @@ package types // DONTCOVER import ( - sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "cosmossdk.io/errors" ) // x/vendorinfo module sentinel errors. var ( DefaultCodespace = ModuleName - CodeVendorDoesNotExist = sdkerrors.Register(ModuleName, 701, "Code vendor does not exist") - CodeMissingVendorIDForVendorAccount = sdkerrors.Register(ModuleName, 702, "Code missing vendor id for vendor account") - CodeVendorInfoAlreadyExists = sdkerrors.Register(ModuleName, 703, "Code vendorinfo already exists") + CodeVendorDoesNotExist = errors.Register(ModuleName, 701, "Code vendor does not exist") + CodeMissingVendorIDForVendorAccount = errors.Register(ModuleName, 702, "Code missing vendor id for vendor account") + CodeVendorInfoAlreadyExists = errors.Register(ModuleName, 703, "Code vendorinfo already exists") ) diff --git a/x/vendorinfo/types/genesis.pb.go b/x/vendorinfo/types/genesis.pb.go index 5d604acf9..1ba73d37e 100644 --- a/x/vendorinfo/types/genesis.pb.go +++ b/x/vendorinfo/types/genesis.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: vendorinfo/genesis.proto +// source: zigbeealliance/distributedcomplianceledger/vendorinfo/genesis.proto package types import ( fmt "fmt" - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -32,7 +32,7 @@ func (m *GenesisState) Reset() { *m = GenesisState{} } func (m *GenesisState) String() string { return proto.CompactTextString(m) } func (*GenesisState) ProtoMessage() {} func (*GenesisState) Descriptor() ([]byte, []int) { - return fileDescriptor_742c4342512de36b, []int{0} + return fileDescriptor_f395b6882453331e, []int{0} } func (m *GenesisState) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -72,25 +72,28 @@ func init() { proto.RegisterType((*GenesisState)(nil), "zigbeealliance.distributedcomplianceledger.vendorinfo.GenesisState") } -func init() { proto.RegisterFile("vendorinfo/genesis.proto", fileDescriptor_742c4342512de36b) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/vendorinfo/genesis.proto", fileDescriptor_f395b6882453331e) +} -var fileDescriptor_742c4342512de36b = []byte{ - // 239 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x28, 0x4b, 0xcd, 0x4b, - 0xc9, 0x2f, 0xca, 0xcc, 0x4b, 0xcb, 0xd7, 0x4f, 0x4f, 0xcd, 0x4b, 0x2d, 0xce, 0x2c, 0xd6, 0x2b, - 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x32, 0xad, 0xca, 0x4c, 0x4f, 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, - 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4b, 0xc9, 0x2c, 0x2e, 0x29, 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, - 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, 0xa7, 0x16, 0xe9, 0x21, 0x0c, 0x91, - 0x92, 0x41, 0x32, 0x10, 0xc2, 0x8c, 0x07, 0xb1, 0x21, 0x86, 0x4a, 0x89, 0xa4, 0xe7, 0xa7, 0xe7, - 0x83, 0x99, 0xfa, 0x20, 0x16, 0x44, 0x54, 0xa9, 0x9e, 0x8b, 0xc7, 0x1d, 0x62, 0x77, 0x70, 0x49, - 0x62, 0x49, 0xaa, 0x50, 0x3e, 0x17, 0x1f, 
0x44, 0xab, 0x67, 0x5e, 0x5a, 0xbe, 0x4f, 0x66, 0x71, - 0x89, 0x04, 0xa3, 0x02, 0xb3, 0x06, 0xb7, 0x91, 0xa3, 0x1e, 0x59, 0x6e, 0xd2, 0x0b, 0x83, 0x1b, - 0xe6, 0xc4, 0x72, 0xe2, 0x9e, 0x3c, 0x43, 0x10, 0x9a, 0xf1, 0x4e, 0xa9, 0x27, 0x1e, 0xc9, 0x31, - 0x5e, 0x78, 0x24, 0xc7, 0xf8, 0xe0, 0x91, 0x1c, 0xe3, 0x84, 0xc7, 0x72, 0x0c, 0x17, 0x1e, 0xcb, - 0x31, 0xdc, 0x78, 0x2c, 0xc7, 0x10, 0xe5, 0x9d, 0x9e, 0x59, 0x92, 0x51, 0x9a, 0xa4, 0x97, 0x9c, - 0x9f, 0xab, 0x0f, 0xb1, 0x5c, 0x17, 0x66, 0xbb, 0x3e, 0x92, 0xed, 0xba, 0x08, 0xeb, 0x75, 0x21, - 0xf6, 0xeb, 0x57, 0xe8, 0x23, 0x85, 0x44, 0x49, 0x65, 0x41, 0x6a, 0x71, 0x12, 0x1b, 0xd8, 0xbb, - 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0xc4, 0xf8, 0x50, 0xfd, 0x75, 0x01, 0x00, 0x00, +var fileDescriptor_f395b6882453331e = []byte{ + // 241 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x72, 0xae, 0xca, 0x4c, 0x4f, + 0x4a, 0x4d, 0x4d, 0xcc, 0xc9, 0xc9, 0x4c, 0xcc, 0x4b, 0x4e, 0xd5, 0x4f, 0xc9, 0x2c, 0x2e, 0x29, + 0xca, 0x4c, 0x2a, 0x2d, 0x49, 0x4d, 0x49, 0xce, 0xcf, 0x2d, 0x80, 0x88, 0xe6, 0xa4, 0xa6, 0xa4, + 0xa7, 0x16, 0xe9, 0x97, 0xa5, 0xe6, 0xa5, 0xe4, 0x17, 0x65, 0xe6, 0xa5, 0xe5, 0xeb, 0xa7, 0xa7, + 0xe6, 0xa5, 0x16, 0x67, 0x16, 0xeb, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0x99, 0xa2, 0x1a, 0xa2, + 0x87, 0xc7, 0x10, 0x3d, 0x84, 0x21, 0x52, 0xee, 0xe4, 0xd9, 0x0d, 0x61, 0xc6, 0x83, 0xd8, 0x10, + 0xfb, 0xa5, 0x44, 0xd2, 0xf3, 0xd3, 0xf3, 0xc1, 0x4c, 0x7d, 0x10, 0x0b, 0x22, 0xaa, 0x54, 0xcf, + 0xc5, 0xe3, 0x0e, 0x71, 0x66, 0x70, 0x49, 0x62, 0x49, 0xaa, 0x50, 0x3e, 0x17, 0x1f, 0x44, 0xab, + 0x67, 0x5e, 0x5a, 0xbe, 0x4f, 0x66, 0x71, 0x89, 0x04, 0xa3, 0x02, 0xb3, 0x06, 0xb7, 0x91, 0xa3, + 0x1e, 0x59, 0xce, 0xd7, 0x0b, 0x83, 0x1b, 0xe6, 0xc4, 0x72, 0xe2, 0x9e, 0x3c, 0x43, 0x10, 0x9a, + 0xf1, 0x4e, 0xa9, 0x27, 0x1e, 0xc9, 0x31, 0x5e, 0x78, 0x24, 0xc7, 0xf8, 0xe0, 0x91, 0x1c, 0xe3, + 0x84, 0xc7, 0x72, 0x0c, 0x17, 0x1e, 0xcb, 0x31, 0xdc, 0x78, 0x2c, 0xc7, 0x10, 0xe5, 0x9d, 
0x9e, + 0x59, 0x92, 0x51, 0x9a, 0xa4, 0x97, 0x9c, 0x9f, 0xab, 0x0f, 0xb1, 0x5c, 0x17, 0x5b, 0x28, 0xe8, + 0x22, 0xac, 0xd7, 0x85, 0x86, 0x43, 0x05, 0x72, 0x48, 0x94, 0x54, 0x16, 0xa4, 0x16, 0x27, 0xb1, + 0x81, 0xbd, 0x6b, 0x0c, 0x08, 0x00, 0x00, 0xff, 0xff, 0xd3, 0x83, 0x69, 0xcd, 0xcb, 0x01, 0x00, + 0x00, } func (m *GenesisState) Marshal() (dAtA []byte, err error) { diff --git a/x/vendorinfo/types/messages_vendor_info.go b/x/vendorinfo/types/messages_vendor_info.go index 75a950000..489297517 100644 --- a/x/vendorinfo/types/messages_vendor_info.go +++ b/x/vendorinfo/types/messages_vendor_info.go @@ -1,6 +1,7 @@ package types import ( + "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" @@ -54,7 +55,7 @@ func (msg *MsgCreateVendorInfo) GetSignBytes() []byte { func (msg *MsgCreateVendorInfo) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) } err = validator.Validate(msg) @@ -113,7 +114,7 @@ func (msg *MsgUpdateVendorInfo) GetSignBytes() []byte { func (msg *MsgUpdateVendorInfo) ValidateBasic() error { _, err := sdk.AccAddressFromBech32(msg.Creator) if err != nil { - return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) + return errors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) } err = validator.Validate(msg) @@ -161,7 +162,7 @@ func (msg *MsgUpdateVendorInfo) ValidateBasic() error { // func (msg *MsgDeleteVendorInfo) ValidateBasic() error { // _, err := sdk.AccAddressFromBech32(msg.Creator) // if err != nil { -// return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "invalid creator address (%s)", err) +// return errors.Wrapf(sdkerrors.ErrInvalidAddress, 
"invalid creator address (%s)", err) // } // err = validator.Validate(msg) diff --git a/x/vendorinfo/types/messages_vendor_info_test.go b/x/vendorinfo/types/messages_vendor_info_test.go index 1abbcfc70..68a4ed2ad 100644 --- a/x/vendorinfo/types/messages_vendor_info_test.go +++ b/x/vendorinfo/types/messages_vendor_info_test.go @@ -3,9 +3,9 @@ package types import ( "testing" + tmrand "github.com/cometbft/cometbft/libs/rand" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/stretchr/testify/require" - tmrand "github.com/tendermint/tendermint/libs/rand" testconstants "github.com/zigbee-alliance/distributed-compliance-ledger/integration_tests/constants" "github.com/zigbee-alliance/distributed-compliance-ledger/testutil/sample" "github.com/zigbee-alliance/distributed-compliance-ledger/utils/validator" diff --git a/x/vendorinfo/types/query.pb.go b/x/vendorinfo/types/query.pb.go index 29250a0b8..b2287380c 100644 --- a/x/vendorinfo/types/query.pb.go +++ b/x/vendorinfo/types/query.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: vendorinfo/query.proto +// source: zigbeealliance/distributedcomplianceledger/vendorinfo/query.proto package types @@ -7,9 +7,9 @@ import ( context "context" fmt "fmt" query "github.com/cosmos/cosmos-sdk/types/query" - _ "github.com/gogo/protobuf/gogoproto" - grpc1 "github.com/gogo/protobuf/grpc" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + grpc1 "github.com/cosmos/gogoproto/grpc" + proto "github.com/cosmos/gogoproto/proto" _ "google.golang.org/genproto/googleapis/api/annotations" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" @@ -38,7 +38,7 @@ func (m *QueryGetVendorInfoRequest) Reset() { *m = QueryGetVendorInfoReq func (m *QueryGetVendorInfoRequest) String() string { return proto.CompactTextString(m) } func (*QueryGetVendorInfoRequest) ProtoMessage() {} func (*QueryGetVendorInfoRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_e4af47a04389bb67, []int{0} + return fileDescriptor_ae42cb3749c6e205, []int{0} } func (m *QueryGetVendorInfoRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -82,7 +82,7 @@ func (m *QueryGetVendorInfoResponse) Reset() { *m = QueryGetVendorInfoRe func (m *QueryGetVendorInfoResponse) String() string { return proto.CompactTextString(m) } func (*QueryGetVendorInfoResponse) ProtoMessage() {} func (*QueryGetVendorInfoResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_e4af47a04389bb67, []int{1} + return fileDescriptor_ae42cb3749c6e205, []int{1} } func (m *QueryGetVendorInfoResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -126,7 +126,7 @@ func (m *QueryAllVendorInfoRequest) Reset() { *m = QueryAllVendorInfoReq func (m *QueryAllVendorInfoRequest) String() string { return proto.CompactTextString(m) } func (*QueryAllVendorInfoRequest) ProtoMessage() {} func (*QueryAllVendorInfoRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_e4af47a04389bb67, []int{2} + return fileDescriptor_ae42cb3749c6e205, []int{2} } 
func (m *QueryAllVendorInfoRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -171,7 +171,7 @@ func (m *QueryAllVendorInfoResponse) Reset() { *m = QueryAllVendorInfoRe func (m *QueryAllVendorInfoResponse) String() string { return proto.CompactTextString(m) } func (*QueryAllVendorInfoResponse) ProtoMessage() {} func (*QueryAllVendorInfoResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_e4af47a04389bb67, []int{3} + return fileDescriptor_ae42cb3749c6e205, []int{3} } func (m *QueryAllVendorInfoResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -221,40 +221,42 @@ func init() { proto.RegisterType((*QueryAllVendorInfoResponse)(nil), "zigbeealliance.distributedcomplianceledger.vendorinfo.QueryAllVendorInfoResponse") } -func init() { proto.RegisterFile("vendorinfo/query.proto", fileDescriptor_e4af47a04389bb67) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/vendorinfo/query.proto", fileDescriptor_ae42cb3749c6e205) +} -var fileDescriptor_e4af47a04389bb67 = []byte{ - // 467 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x94, 0xc1, 0x6b, 0xd4, 0x40, - 0x14, 0xc6, 0x37, 0xad, 0x15, 0x19, 0xf1, 0x32, 0x88, 0xda, 0x50, 0x52, 0x09, 0xa2, 0x52, 0xd8, - 0x19, 0x5a, 0x11, 0xcf, 0x5b, 0xc4, 0x52, 0xbc, 0xb4, 0x7b, 0xf0, 0xe0, 0x45, 0x26, 0xc9, 0xeb, - 0x38, 0x30, 0x3b, 0x2f, 0xcd, 0xcc, 0x2e, 0x56, 0xf1, 0x22, 0x78, 0x17, 0xfc, 0x83, 0xbc, 0xf6, - 0x24, 0x05, 0x11, 0xbc, 0x28, 0xb2, 0xeb, 0x1f, 0x22, 0xc9, 0x64, 0x9b, 0x68, 0x77, 0x11, 0xba, - 0xf6, 0xf6, 0x32, 0x99, 0xf7, 0xbd, 0xef, 0xf7, 0xf2, 0x11, 0x72, 0x63, 0x04, 0x26, 0xc3, 0x42, - 0x99, 0x03, 0xe4, 0x87, 0x43, 0x28, 0x8e, 0x58, 0x5e, 0xa0, 0x43, 0xfa, 0xf0, 0xb5, 0x92, 0x09, - 0x80, 0xd0, 0x5a, 0x09, 0x93, 0x02, 0xcb, 0x94, 0x75, 0x85, 0x4a, 0x86, 0x0e, 0xb2, 0x14, 0x07, - 0xb9, 0x3f, 0xd5, 0x90, 0x49, 0x28, 0x58, 0x23, 0x11, 0xae, 0x49, 0x44, 0xa9, 0x81, 0x8b, 0x5c, - 0x71, 0x61, 0x0c, 
0x3a, 0xe1, 0x14, 0x1a, 0xeb, 0x45, 0xc3, 0x8d, 0x14, 0xed, 0x00, 0x2d, 0x4f, - 0x84, 0x05, 0x3f, 0x8d, 0x8f, 0x36, 0x13, 0x70, 0x62, 0x93, 0xe7, 0x42, 0x2a, 0x53, 0x5d, 0xae, - 0xef, 0xae, 0xb5, 0x8c, 0xf9, 0xf2, 0x45, 0x59, 0xd7, 0x6f, 0xaf, 0x4b, 0x94, 0x58, 0x95, 0xbc, - 0xac, 0xfc, 0x69, 0xfc, 0x88, 0xac, 0xee, 0x97, 0xaa, 0x3b, 0xe0, 0x9e, 0x55, 0x2d, 0xbb, 0xe6, - 0x00, 0xfb, 0x70, 0x38, 0x04, 0xeb, 0x68, 0x48, 0xae, 0x78, 0x9d, 0xdd, 0xc7, 0xb7, 0x82, 0xdb, - 0xc1, 0xfd, 0x95, 0xfe, 0xe9, 0x73, 0xfc, 0x3e, 0x20, 0xe1, 0xac, 0x4e, 0x9b, 0xa3, 0xb1, 0x40, - 0x25, 0x21, 0xa3, 0xd3, 0xd3, 0xaa, 0xf9, 0xea, 0x56, 0x8f, 0x9d, 0x6b, 0x43, 0xac, 0x91, 0xdf, - 0xbe, 0x74, 0xfc, 0x63, 0xbd, 0xd3, 0x6f, 0x49, 0xc7, 0x69, 0x0d, 0xd0, 0xd3, 0xfa, 0x2c, 0xc0, - 0x13, 0x42, 0x9a, 0x2d, 0xd5, 0x2e, 0xee, 0x32, 0xbf, 0x52, 0x56, 0xae, 0x94, 0xf9, 0x0f, 0x58, - 0xaf, 0x94, 0xed, 0x09, 0x09, 0x75, 0x6f, 0xbf, 0xd5, 0x19, 0x7f, 0x9e, 0xc2, 0xfe, 0x35, 0x65, - 0x0e, 0xec, 0xf2, 0x05, 0xc1, 0xd2, 0x9d, 0x3f, 0x78, 0x96, 0x2a, 0x9e, 0x7b, 0xff, 0xe4, 0xf1, - 0x2e, 0xdb, 0x40, 0x5b, 0x9f, 0x96, 0xc9, 0x4a, 0x05, 0x44, 0xbf, 0x07, 0x84, 0x34, 0x33, 0xe9, - 0xde, 0x39, 0x6d, 0xcf, 0x0d, 0x51, 0xb8, 0xff, 0x1f, 0x15, 0x3d, 0x49, 0xbc, 0xf1, 0xee, 0xcb, - 0xaf, 0x8f, 0x4b, 0x77, 0x68, 0xcc, 0xb3, 0x54, 0xf3, 0x33, 0xa9, 0xb7, 0xfc, 0xcd, 0x34, 0xa6, - 0x6f, 0xe9, 0xd7, 0x80, 0x5c, 0x6b, 0x24, 0x7a, 0x5a, 0x2f, 0x86, 0x38, 0x2b, 0x66, 0x8b, 0x21, - 0xce, 0x8c, 0x54, 0xbc, 0x5e, 0x21, 0xae, 0xd2, 0x9b, 0x73, 0x10, 0xb7, 0xe1, 0x78, 0x1c, 0x05, - 0x27, 0xe3, 0x28, 0xf8, 0x39, 0x8e, 0x82, 0x0f, 0x93, 0xa8, 0x73, 0x32, 0x89, 0x3a, 0xdf, 0x26, - 0x51, 0xe7, 0xf9, 0x53, 0xa9, 0xdc, 0xcb, 0x61, 0xc2, 0x52, 0x1c, 0x70, 0xef, 0xab, 0x3b, 0x35, - 0xc6, 0x5b, 0xc6, 0xba, 0x8d, 0xb3, 0xae, 0xb7, 0xc6, 0x5f, 0xb5, 0x07, 0xb9, 0xa3, 0x1c, 0x6c, - 0x72, 0xb9, 0xfa, 0x4d, 0x3c, 0xf8, 0x1d, 0x00, 0x00, 0xff, 0xff, 0x4e, 0x8c, 0xd6, 0x1e, 0xf5, - 0x04, 0x00, 0x00, +var fileDescriptor_ae42cb3749c6e205 = 
[]byte{ + // 471 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x94, 0x41, 0x6b, 0x13, 0x41, + 0x14, 0xc7, 0xb3, 0xad, 0x15, 0x19, 0xf1, 0x32, 0x08, 0xda, 0x45, 0xb6, 0xb2, 0x88, 0x4a, 0x21, + 0x33, 0xb4, 0x22, 0x9e, 0x53, 0xc4, 0x50, 0xbc, 0xb4, 0x39, 0x78, 0xf0, 0x22, 0xb3, 0xbb, 0xaf, + 0xe3, 0xc0, 0x64, 0xde, 0x76, 0x67, 0x12, 0xac, 0xe2, 0x45, 0xf0, 0x2e, 0xf8, 0x81, 0xbc, 0xf6, + 0x24, 0x05, 0x11, 0xbc, 0x28, 0x92, 0xf8, 0x41, 0x24, 0x33, 0xdb, 0x6e, 0x6a, 0x13, 0x85, 0xd4, + 0xde, 0xde, 0x3e, 0xe6, 0xfd, 0xdf, 0xff, 0xf7, 0xf6, 0xf1, 0x48, 0xe7, 0xb5, 0x92, 0x19, 0x80, + 0xd0, 0x5a, 0x09, 0x93, 0x03, 0x2f, 0x94, 0x75, 0x95, 0xca, 0x06, 0x0e, 0x8a, 0x1c, 0xfb, 0x65, + 0xc8, 0x6a, 0x28, 0x24, 0x54, 0x7c, 0x08, 0xa6, 0xc0, 0x4a, 0x99, 0x3d, 0xe4, 0xfb, 0x03, 0xa8, + 0x0e, 0x58, 0x59, 0xa1, 0x43, 0xfa, 0xf0, 0xb4, 0x04, 0xfb, 0x8b, 0x04, 0x6b, 0x24, 0xe2, 0x5b, + 0x12, 0x51, 0x6a, 0xe0, 0xa2, 0x54, 0x5c, 0x18, 0x83, 0x4e, 0x38, 0x85, 0xc6, 0x06, 0xd1, 0x78, + 0x3d, 0x47, 0xdb, 0x47, 0xcb, 0x33, 0x61, 0x21, 0x74, 0xe3, 0xc3, 0x8d, 0x0c, 0x9c, 0xd8, 0xe0, + 0xa5, 0x90, 0xca, 0xf8, 0xc7, 0xf5, 0xdb, 0xee, 0x62, 0x0c, 0x21, 0x7c, 0x31, 0x89, 0x6b, 0xa1, + 0xeb, 0x12, 0x25, 0xfa, 0x90, 0x4f, 0xa2, 0x90, 0x4d, 0x1f, 0x91, 0xd5, 0xdd, 0x89, 0x81, 0x2e, + 0xb8, 0x67, 0xbe, 0x64, 0xdb, 0xec, 0x61, 0x0f, 0xf6, 0x07, 0x60, 0x1d, 0x8d, 0xc9, 0x95, 0xa0, + 0xb3, 0xfd, 0xf8, 0x66, 0x74, 0x3b, 0xba, 0xbf, 0xd2, 0x3b, 0xf9, 0x4e, 0xdf, 0x47, 0x24, 0x9e, + 0x55, 0x69, 0x4b, 0x34, 0x16, 0xa8, 0x24, 0x64, 0x78, 0x92, 0xf5, 0xc5, 0x57, 0x37, 0x3b, 0x6c, + 0xa1, 0x61, 0xb2, 0x46, 0x7e, 0xeb, 0xd2, 0xe1, 0x8f, 0xb5, 0x56, 0x6f, 0x4a, 0x3a, 0xcd, 0x6b, + 0x80, 0x8e, 0xd6, 0x67, 0x01, 0x9e, 0x10, 0xd2, 0x0c, 0xb4, 0x76, 0x71, 0x97, 0x85, 0xe9, 0xb3, + 0xc9, 0xf4, 0x59, 0xf8, 0xd7, 0xf5, 0xf4, 0xd9, 0x8e, 0x90, 0x50, 0xd7, 0xf6, 0xa6, 0x2a, 0xd3, + 0xcf, 0xc7, 0xb0, 0x7f, 0x74, 0x99, 0x03, 0xbb, 0x7c, 0x41, 0xb0, 0xb4, 0x7b, 
0x8a, 0x67, 0xc9, + 0xf3, 0xdc, 0xfb, 0x27, 0x4f, 0x70, 0x39, 0x0d, 0xb4, 0xf9, 0x69, 0x99, 0xac, 0x78, 0x20, 0xfa, + 0x3d, 0x22, 0xa4, 0xe9, 0x49, 0x77, 0x16, 0xb4, 0x3d, 0x77, 0x89, 0xe2, 0xdd, 0xff, 0xa8, 0x18, + 0x48, 0xd2, 0xf5, 0x77, 0x5f, 0x7e, 0x7d, 0x5c, 0xba, 0x43, 0x53, 0x5e, 0xe4, 0xfa, 0xec, 0xd6, + 0x5b, 0xfe, 0xe6, 0x78, 0x4d, 0xdf, 0xd2, 0xaf, 0x11, 0xb9, 0xd6, 0x48, 0x74, 0xb4, 0x3e, 0x1f, + 0xe2, 0xac, 0x35, 0x3b, 0x1f, 0xe2, 0xcc, 0x95, 0x4a, 0xd7, 0x3c, 0xe2, 0x2a, 0xbd, 0x31, 0x07, + 0x71, 0x0b, 0x0e, 0x47, 0x49, 0x74, 0x34, 0x4a, 0xa2, 0x9f, 0xa3, 0x24, 0xfa, 0x30, 0x4e, 0x5a, + 0x47, 0xe3, 0xa4, 0xf5, 0x6d, 0x9c, 0xb4, 0x9e, 0x3f, 0x95, 0xca, 0xbd, 0x1c, 0x64, 0x2c, 0xc7, + 0x3e, 0x0f, 0xbe, 0xda, 0xb3, 0xae, 0x47, 0xbb, 0x71, 0xd6, 0xae, 0xef, 0xc7, 0xab, 0xe9, 0x46, + 0xee, 0xa0, 0x04, 0x9b, 0x5d, 0xf6, 0x67, 0xe2, 0xc1, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x07, + 0x51, 0x0e, 0x7a, 0x4b, 0x05, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -374,7 +376,7 @@ var _Query_serviceDesc = grpc.ServiceDesc{ }, }, Streams: []grpc.StreamDesc{}, - Metadata: "vendorinfo/query.proto", + Metadata: "zigbeealliance/distributedcomplianceledger/vendorinfo/query.proto", } func (m *QueryGetVendorInfoRequest) Marshal() (dAtA []byte, err error) { diff --git a/x/vendorinfo/types/query.pb.gw.go b/x/vendorinfo/types/query.pb.gw.go index a002c11a5..26d81d167 100644 --- a/x/vendorinfo/types/query.pb.gw.go +++ b/x/vendorinfo/types/query.pb.gw.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. -// source: vendorinfo/query.proto +// source: zigbeealliance/distributedcomplianceledger/vendorinfo/query.proto /* Package types is a reverse proxy. 
@@ -260,9 +260,9 @@ func RegisterQueryHandlerClient(ctx context.Context, mux *runtime.ServeMux, clie } var ( - pattern_Query_VendorInfo_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "vendorinfo", "vendors", "vendorID"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_VendorInfo_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dcl", "vendorinfo", "vendors", "vendorID"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_VendorInfoAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "vendorinfo", "vendors"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Query_VendorInfoAll_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"dcl", "vendorinfo", "vendors"}, "", runtime.AssumeColonVerbOpt(false))) ) var ( diff --git a/x/vendorinfo/types/tx.pb.go b/x/vendorinfo/types/tx.pb.go index 7264bca4e..ad3745373 100644 --- a/x/vendorinfo/types/tx.pb.go +++ b/x/vendorinfo/types/tx.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: vendorinfo/tx.proto +// source: zigbeealliance/distributedcomplianceledger/vendorinfo/tx.proto package types @@ -7,9 +7,9 @@ import ( context "context" fmt "fmt" _ "github.com/cosmos/cosmos-proto" - _ "github.com/gogo/protobuf/gogoproto" - grpc1 "github.com/gogo/protobuf/grpc" - proto "github.com/gogo/protobuf/proto" + _ "github.com/cosmos/gogoproto/gogoproto" + grpc1 "github.com/cosmos/gogoproto/grpc" + proto "github.com/cosmos/gogoproto/proto" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" status "google.golang.org/grpc/status" @@ -43,7 +43,7 @@ func (m *MsgCreateVendorInfo) Reset() { *m = MsgCreateVendorInfo{} } func (m *MsgCreateVendorInfo) String() string { return proto.CompactTextString(m) } func (*MsgCreateVendorInfo) ProtoMessage() {} func (*MsgCreateVendorInfo) Descriptor() ([]byte, []int) { - return fileDescriptor_193d895112924f09, []int{0} + return fileDescriptor_c60960e5353f4d12, []int{0} } func (m *MsgCreateVendorInfo) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -128,7 +128,7 @@ func (m *MsgCreateVendorInfoResponse) Reset() { *m = MsgCreateVendorInfo func (m *MsgCreateVendorInfoResponse) String() string { return proto.CompactTextString(m) } func (*MsgCreateVendorInfoResponse) ProtoMessage() {} func (*MsgCreateVendorInfoResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_193d895112924f09, []int{1} + return fileDescriptor_c60960e5353f4d12, []int{1} } func (m *MsgCreateVendorInfoResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -171,7 +171,7 @@ func (m *MsgUpdateVendorInfo) Reset() { *m = MsgUpdateVendorInfo{} } func (m *MsgUpdateVendorInfo) String() string { return proto.CompactTextString(m) } func (*MsgUpdateVendorInfo) ProtoMessage() {} func (*MsgUpdateVendorInfo) Descriptor() ([]byte, []int) { - return fileDescriptor_193d895112924f09, []int{2} + return fileDescriptor_c60960e5353f4d12, []int{2} } func (m *MsgUpdateVendorInfo) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) 
@@ -256,7 +256,7 @@ func (m *MsgUpdateVendorInfoResponse) Reset() { *m = MsgUpdateVendorInfo func (m *MsgUpdateVendorInfoResponse) String() string { return proto.CompactTextString(m) } func (*MsgUpdateVendorInfoResponse) ProtoMessage() {} func (*MsgUpdateVendorInfoResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_193d895112924f09, []int{3} + return fileDescriptor_c60960e5353f4d12, []int{3} } func (m *MsgUpdateVendorInfoResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -292,46 +292,49 @@ func init() { proto.RegisterType((*MsgUpdateVendorInfoResponse)(nil), "zigbeealliance.distributedcomplianceledger.vendorinfo.MsgUpdateVendorInfoResponse") } -func init() { proto.RegisterFile("vendorinfo/tx.proto", fileDescriptor_193d895112924f09) } - -var fileDescriptor_193d895112924f09 = []byte{ - // 574 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x95, 0x4f, 0x6e, 0xd3, 0x40, - 0x14, 0xc6, 0xeb, 0xfe, 0x85, 0x91, 0x2a, 0x55, 0x6e, 0x16, 0x21, 0x2d, 0x76, 0x64, 0xb1, 0xe8, - 0xa2, 0xb1, 0x69, 0xab, 0x54, 0xa8, 0x52, 0x04, 0x84, 0x6e, 0x42, 0x53, 0x54, 0x19, 0x5a, 0x21, - 0x36, 0xd5, 0xc4, 0x7e, 0x99, 0x8e, 0x64, 0x7b, 0xcc, 0xcc, 0xa4, 0x4a, 0x38, 0x05, 0x47, 0x40, - 0x1c, 0x01, 0x71, 0x08, 0x96, 0x15, 0x2b, 0x56, 0x11, 0x4a, 0x0e, 0x80, 0x94, 0x13, 0xa0, 0x78, - 0x12, 0x62, 0x1a, 0xb7, 0x95, 0x28, 0x0b, 0x16, 0xec, 0x5e, 0x9e, 0xf3, 0xfd, 0xde, 0xa7, 0x37, - 0x9f, 0x66, 0xd0, 0xea, 0x39, 0x44, 0x3e, 0xe3, 0x34, 0x6a, 0x32, 0x47, 0xb6, 0xed, 0x98, 0x33, - 0xc9, 0xf4, 0xf2, 0x3b, 0x4a, 0x1a, 0x00, 0x38, 0x08, 0x28, 0x8e, 0x3c, 0xb0, 0x7d, 0x2a, 0x24, - 0xa7, 0x8d, 0x96, 0x04, 0xdf, 0x63, 0x61, 0xac, 0xba, 0x01, 0xf8, 0x04, 0xb8, 0x3d, 0xd1, 0x17, - 0xd6, 0x53, 0x2c, 0x55, 0x9e, 0x0e, 0x6b, 0x05, 0x2d, 0xe4, 0x08, 0x23, 0x2c, 0x29, 0x9d, 0x61, - 0x35, 0xea, 0xde, 0xf3, 0x98, 0x08, 0x99, 0x38, 0x55, 0x1f, 0xd4, 0x0f, 0xf5, 0xc9, 0xfa, 0x30, - 0x8f, 0x56, 0x0f, 0x05, 0x79, 0xc6, 0x01, 0x4b, 
0x38, 0x49, 0x78, 0xb5, 0xa8, 0xc9, 0xf4, 0x1a, - 0x5a, 0xf2, 0x86, 0x3d, 0xc6, 0xf3, 0x5a, 0x51, 0xdb, 0xb8, 0x5b, 0x75, 0x06, 0x5d, 0x73, 0xf5, - 0x1c, 0x07, 0xd4, 0xc7, 0x12, 0xf6, 0x2c, 0x0e, 0x6f, 0x5b, 0x94, 0x83, 0x6f, 0x7d, 0xfd, 0x5c, - 0xca, 0x8d, 0x88, 0x4f, 0x7d, 0x9f, 0x83, 0x10, 0x2f, 0x25, 0xa7, 0x11, 0x71, 0xc7, 0x7a, 0x7d, - 0x0f, 0xdd, 0x51, 0x46, 0x6b, 0xfb, 0xf9, 0xd9, 0xa2, 0xb6, 0xb1, 0x50, 0x35, 0x06, 0x5d, 0xb3, - 0x30, 0x61, 0x11, 0x09, 0x95, 0xad, 0xcd, 0x40, 0x42, 0x65, 0xb7, 0x5c, 0xde, 0x29, 0x5b, 0xee, - 0xaf, 0xff, 0xeb, 0x8f, 0x11, 0x52, 0xf5, 0x0b, 0x1c, 0x42, 0x7e, 0x2e, 0x71, 0x62, 0x0e, 0xba, - 0xe6, 0xda, 0xb4, 0x93, 0xcd, 0x10, 0xb7, 0x2b, 0x5b, 0xdb, 0x8f, 0x2c, 0x37, 0x25, 0xd1, 0x0f, - 0xd0, 0xca, 0x70, 0x9b, 0x38, 0xea, 0xd4, 0x81, 0xe0, 0x20, 0xc1, 0xcc, 0xdf, 0x88, 0xd9, 0x2e, - 0xef, 0x5a, 0xee, 0x94, 0x50, 0x7f, 0x85, 0x72, 0xa3, 0xde, 0x11, 0x87, 0x26, 0x70, 0x0e, 0x7e, - 0x02, 0x5c, 0x48, 0x80, 0xc5, 0x41, 0xd7, 0x5c, 0x9f, 0x00, 0x59, 0x48, 0x25, 0x84, 0xb1, 0xec, - 0x4c, 0x88, 0x99, 0x6a, 0xfd, 0x35, 0xca, 0x29, 0xc3, 0x75, 0x1c, 0xf9, 0x34, 0x22, 0x47, 0x98, - 0xc0, 0xb1, 0x5b, 0xcf, 0x2f, 0x26, 0xd4, 0x07, 0x83, 0xae, 0x59, 0xbc, 0x86, 0xba, 0xd9, 0xe2, - 0x81, 0xe5, 0x66, 0x12, 0xf4, 0x7d, 0xb4, 0x2c, 0xbc, 0x33, 0x08, 0xf1, 0x09, 0x70, 0x41, 0x59, - 0x94, 0x5f, 0x2a, 0x6a, 0x1b, 0xcb, 0x59, 0xeb, 0x7f, 0x98, 0x5e, 0xff, 0xef, 0x22, 0xeb, 0x3e, - 0x5a, 0xcb, 0x48, 0x88, 0x0b, 0x22, 0x66, 0x91, 0x00, 0xeb, 0xa3, 0x4a, 0xd0, 0x71, 0xec, 0xff, - 0x93, 0x09, 0x7a, 0x92, 0x91, 0xa0, 0x6b, 0x4f, 0x6a, 0x2a, 0x42, 0xf5, 0x2b, 0x23, 0x74, 0xf3, - 0x89, 0xff, 0xcf, 0x50, 0x3a, 0x43, 0x97, 0x33, 0x32, 0xce, 0xd0, 0xf6, 0x8f, 0x59, 0x34, 0x77, - 0x28, 0x88, 0xfe, 0x49, 0x43, 0x2b, 0x53, 0x57, 0xd1, 0x73, 0xfb, 0x8f, 0x6e, 0x4a, 0x3b, 0x23, - 0xb4, 0x05, 0xf7, 0xef, 0xb1, 0xc6, 0xe6, 0x13, 0xd3, 0x53, 0xe9, 0xbf, 0x85, 0xe9, 0xcb, 0xac, - 0xdb, 0x98, 0xbe, 0x6a, 0xe3, 0x55, 0xf8, 0xd2, 0x33, 0xb4, 0x8b, 0x9e, 0xa1, 0x7d, 
0xef, 0x19, - 0xda, 0xfb, 0xbe, 0x31, 0x73, 0xd1, 0x37, 0x66, 0xbe, 0xf5, 0x8d, 0x99, 0x37, 0x07, 0x84, 0xca, - 0xb3, 0x56, 0xc3, 0xf6, 0x58, 0xe8, 0xa8, 0xb9, 0xa5, 0xf1, 0x60, 0x27, 0x35, 0xb8, 0x34, 0x99, - 0x5c, 0x52, 0xa3, 0x9d, 0xb6, 0x93, 0x7e, 0xe7, 0x3a, 0x31, 0x88, 0xc6, 0x62, 0xf2, 0xca, 0xec, - 0xfc, 0x0c, 0x00, 0x00, 0xff, 0xff, 0xb8, 0x8c, 0x60, 0x7e, 0x02, 0x07, 0x00, 0x00, +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/vendorinfo/tx.proto", fileDescriptor_c60960e5353f4d12) +} + +var fileDescriptor_c60960e5353f4d12 = []byte{ + // 580 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x95, 0xcd, 0x6a, 0xdb, 0x4e, + 0x14, 0xc5, 0xa3, 0x7c, 0xfe, 0xff, 0x03, 0x81, 0xa0, 0x78, 0xe1, 0x3a, 0xad, 0x64, 0x44, 0x17, + 0x5e, 0xd8, 0x52, 0xe3, 0xe0, 0x50, 0x02, 0xee, 0x87, 0x1b, 0x28, 0x6e, 0x9c, 0x12, 0xd4, 0x26, + 0x94, 0x6e, 0xc2, 0x58, 0xba, 0x9e, 0x0c, 0x48, 0x1a, 0x75, 0x66, 0x1c, 0xec, 0x3e, 0x45, 0x1f, + 0xa1, 0xf4, 0x11, 0x4a, 0x1f, 0xa2, 0xcb, 0xd0, 0x55, 0x57, 0xa6, 0xd8, 0x0f, 0x50, 0xf0, 0x13, + 0x14, 0x4b, 0x76, 0x6d, 0xc7, 0x8a, 0x03, 0x49, 0x17, 0x5d, 0x74, 0x77, 0x7d, 0xe5, 0xf3, 0xbb, + 0x87, 0xab, 0x83, 0x2e, 0x7a, 0xf4, 0x9e, 0x92, 0x3a, 0x00, 0xf6, 0x3c, 0x8a, 0x03, 0x07, 0x2c, + 0x97, 0x0a, 0xc9, 0x69, 0xbd, 0x29, 0xc1, 0x75, 0x98, 0x1f, 0xc6, 0x5d, 0x0f, 0x5c, 0x02, 0xdc, + 0x3a, 0x87, 0xc0, 0x65, 0x9c, 0x06, 0x0d, 0x66, 0xc9, 0x96, 0x19, 0x72, 0x26, 0x99, 0x5a, 0x9a, + 0xd6, 0x9b, 0x73, 0xf4, 0xe6, 0x58, 0x9f, 0x79, 0x7e, 0xb3, 0xb1, 0x71, 0x79, 0x3a, 0xa8, 0xe3, + 0xf9, 0x99, 0x14, 0x61, 0x84, 0x45, 0xa5, 0x35, 0xa8, 0x86, 0xdd, 0x3b, 0x0e, 0x13, 0x3e, 0x13, + 0xa7, 0xf1, 0x83, 0xf8, 0x47, 0xfc, 0xc8, 0xf8, 0xb8, 0x8c, 0x36, 0x0f, 0x05, 0x79, 0xc6, 0x01, + 0x4b, 0x38, 0x89, 0x78, 0xd5, 0xa0, 0xc1, 0xd4, 0x2a, 0x5a, 0x73, 0x06, 0x3d, 0xc6, 0xd3, 0x4a, + 0x56, 0xc9, 0xfd, 0x5f, 0xb1, 0xfa, 0x1d, 0x7d, 0xf3, 0x1c, 0x7b, 0xd4, 0xc5, 0x12, 0xf6, 0x0c, 
+ 0x0e, 0xef, 0x9a, 0x94, 0x83, 0x6b, 0x7c, 0xfb, 0x52, 0x48, 0x0d, 0x89, 0x4f, 0x5d, 0x97, 0x83, + 0x10, 0xaf, 0x24, 0xa7, 0x01, 0xb1, 0x47, 0x7a, 0x75, 0x0f, 0xfd, 0x17, 0x1b, 0xad, 0xee, 0xa7, + 0x17, 0xb3, 0x4a, 0x6e, 0xa5, 0xa2, 0xf5, 0x3b, 0x7a, 0x66, 0xcc, 0x22, 0x12, 0xca, 0xdb, 0x79, + 0x4f, 0x42, 0x79, 0xb7, 0x54, 0xda, 0x29, 0x19, 0xf6, 0xef, 0xff, 0xab, 0x8f, 0x11, 0x8a, 0xeb, + 0x97, 0xd8, 0x87, 0xf4, 0x52, 0xe4, 0x44, 0xef, 0x77, 0xf4, 0xad, 0x59, 0x27, 0x79, 0x1f, 0xb7, + 0xca, 0xdb, 0xc5, 0x87, 0x86, 0x3d, 0x21, 0x51, 0x0f, 0xd0, 0xc6, 0x60, 0x83, 0x38, 0x68, 0xd7, + 0x80, 0x60, 0x2f, 0xc2, 0x2c, 0x5f, 0x8b, 0x29, 0x96, 0x76, 0x0d, 0x7b, 0x46, 0xa8, 0xbe, 0x46, + 0xa9, 0x61, 0xef, 0x88, 0x43, 0x03, 0x38, 0x07, 0x37, 0x02, 0xae, 0x44, 0xc0, 0x6c, 0xbf, 0xa3, + 0xdf, 0x1d, 0x03, 0x99, 0x4f, 0x25, 0xf8, 0xa1, 0x6c, 0x8f, 0x89, 0x89, 0x6a, 0xf5, 0x0d, 0x4a, + 0xc5, 0x86, 0x6b, 0x38, 0x70, 0x69, 0x40, 0x8e, 0x30, 0x81, 0x63, 0xbb, 0x96, 0x5e, 0x8d, 0xa8, + 0xf7, 0xfb, 0x1d, 0x3d, 0x3b, 0x87, 0x9a, 0x6f, 0x72, 0xcf, 0xb0, 0x13, 0x09, 0xea, 0x3e, 0x5a, + 0x17, 0xce, 0x19, 0xf8, 0xf8, 0x04, 0xb8, 0xa0, 0x2c, 0x48, 0xaf, 0x65, 0x95, 0xdc, 0x7a, 0xd2, + 0xfa, 0x1f, 0x4c, 0xae, 0x7f, 0x5a, 0x64, 0xdc, 0x43, 0x5b, 0x09, 0x09, 0xb1, 0x41, 0x84, 0x2c, + 0x10, 0x60, 0x7c, 0x8a, 0x13, 0x74, 0x1c, 0xba, 0x7f, 0x65, 0x82, 0x9e, 0x24, 0x24, 0x68, 0xee, + 0x9b, 0x9a, 0x89, 0x50, 0xed, 0xca, 0x08, 0x5d, 0xff, 0xc6, 0xff, 0x65, 0x68, 0x32, 0x43, 0x97, + 0x33, 0x32, 0xca, 0x50, 0xf1, 0xe7, 0x22, 0x5a, 0x3a, 0x14, 0x44, 0xfd, 0xac, 0xa0, 0x8d, 0x99, + 0x4f, 0xd1, 0x0b, 0xf3, 0x46, 0x1f, 0x55, 0x33, 0x21, 0xb4, 0x19, 0xfb, 0xcf, 0xb1, 0x46, 0xe6, + 0x23, 0xd3, 0x33, 0xe9, 0xbf, 0x85, 0xe9, 0xcb, 0xac, 0xdb, 0x98, 0xbe, 0x6a, 0xe3, 0x15, 0xf8, + 0xda, 0xd5, 0x94, 0x8b, 0xae, 0xa6, 0xfc, 0xe8, 0x6a, 0xca, 0x87, 0x9e, 0xb6, 0x70, 0xd1, 0xd3, + 0x16, 0xbe, 0xf7, 0xb4, 0x85, 0xb7, 0x07, 0x84, 0xca, 0xb3, 0x66, 0xdd, 0x74, 0x98, 0x6f, 0xc5, + 0x73, 0x0b, 0x49, 0x77, 0xa9, 0x30, 
0x9e, 0x5c, 0x18, 0x5e, 0xa6, 0xd6, 0xd4, 0x49, 0x6c, 0x87, + 0x20, 0xea, 0xab, 0xd1, 0x95, 0xd9, 0xf9, 0x15, 0x00, 0x00, 0xff, 0xff, 0x93, 0x67, 0x38, 0xef, + 0x58, 0x07, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -447,7 +450,7 @@ var _Msg_serviceDesc = grpc.ServiceDesc{ }, }, Streams: []grpc.StreamDesc{}, - Metadata: "vendorinfo/tx.proto", + Metadata: "zigbeealliance/distributedcomplianceledger/vendorinfo/tx.proto", } func (m *MsgCreateVendorInfo) Marshal() (dAtA []byte, err error) { diff --git a/x/vendorinfo/types/vendor_info.pb.go b/x/vendorinfo/types/vendor_info.pb.go index 6bb8933f0..49ea4d1f4 100644 --- a/x/vendorinfo/types/vendor_info.pb.go +++ b/x/vendorinfo/types/vendor_info.pb.go @@ -1,12 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: vendorinfo/vendor_info.proto +// source: zigbeealliance/distributedcomplianceledger/vendorinfo/vendor_info.proto package types import ( fmt "fmt" _ "github.com/cosmos/cosmos-proto" - proto "github.com/gogo/protobuf/proto" + proto "github.com/cosmos/gogoproto/proto" io "io" math "math" math_bits "math/bits" @@ -37,7 +37,7 @@ func (m *VendorInfo) Reset() { *m = VendorInfo{} } func (m *VendorInfo) String() string { return proto.CompactTextString(m) } func (*VendorInfo) ProtoMessage() {} func (*VendorInfo) Descriptor() ([]byte, []int) { - return fileDescriptor_45c44599adc45c13, []int{0} + return fileDescriptor_89011ac07869c1d6, []int{0} } func (m *VendorInfo) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -119,32 +119,34 @@ func init() { proto.RegisterType((*VendorInfo)(nil), "zigbeealliance.distributedcomplianceledger.vendorinfo.VendorInfo") } -func init() { proto.RegisterFile("vendorinfo/vendor_info.proto", fileDescriptor_45c44599adc45c13) } +func init() { + proto.RegisterFile("zigbeealliance/distributedcomplianceledger/vendorinfo/vendor_info.proto", fileDescriptor_89011ac07869c1d6) +} -var fileDescriptor_45c44599adc45c13 = []byte{ - // 349 bytes 
of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x91, 0xd1, 0x4a, 0x32, 0x41, - 0x14, 0xc7, 0x5d, 0xbf, 0x4f, 0xad, 0x01, 0x21, 0x16, 0x2f, 0x36, 0x89, 0x45, 0xa2, 0x0b, 0x09, - 0x76, 0x17, 0x8c, 0x1e, 0x20, 0xe9, 0x46, 0x92, 0x90, 0x8d, 0xbc, 0xe8, 0x46, 0x66, 0x77, 0x8e, - 0xe3, 0xc0, 0xee, 0x8c, 0xcc, 0x8c, 0x91, 0x3d, 0x45, 0x0f, 0x13, 0x3d, 0x43, 0x97, 0xd2, 0x55, - 0x97, 0xa1, 0x2f, 0x12, 0x3b, 0xa3, 0x69, 0xe4, 0xdd, 0x39, 0xff, 0xf3, 0x3b, 0x67, 0x97, 0xf9, - 0xa1, 0x93, 0x47, 0xe0, 0x44, 0x48, 0xc6, 0xc7, 0x22, 0xb2, 0xe5, 0xa8, 0xa8, 0xc3, 0xa9, 0x14, - 0x5a, 0xb8, 0x97, 0xcf, 0x8c, 0x26, 0x00, 0x38, 0xcb, 0x18, 0xe6, 0x29, 0x84, 0x84, 0x29, 0x2d, - 0x59, 0x32, 0xd3, 0x40, 0x52, 0x91, 0x4f, 0x6d, 0x9a, 0x01, 0xa1, 0x20, 0xc3, 0xed, 0xa1, 0xe6, - 0x71, 0x2a, 0x54, 0x2e, 0xd4, 0xc8, 0x1c, 0x89, 0x6c, 0x63, 0x2f, 0x9e, 0xbe, 0x95, 0x11, 0x1a, - 0x1a, 0xb2, 0xc7, 0xc7, 0xc2, 0x6d, 0xa2, 0x03, 0xbb, 0xd7, 0xbb, 0xf6, 0x9c, 0x96, 0xd3, 0xae, - 0xc4, 0x3f, 0xbd, 0xeb, 0x23, 0x64, 0xeb, 0x5b, 0x9c, 0x83, 0x57, 0x6e, 0x39, 0xed, 0xc3, 0x78, - 0x27, 0x71, 0xcf, 0xd1, 0x51, 0xf1, 0x13, 0x98, 0xcf, 0xfb, 0x40, 0x71, 0x66, 0xa8, 0x7f, 0x86, - 0xfa, 0x93, 0xbb, 0x1d, 0xd4, 0x58, 0x67, 0x03, 0x09, 0x63, 0x90, 0x12, 0x88, 0xe1, 0xff, 0x1b, - 0x7e, 0xef, 0xac, 0xd8, 0xb1, 0x5f, 0xeb, 0x63, 0x4e, 0x18, 0xa7, 0x03, 0x4c, 0xe1, 0x3e, 0xee, - 0x7b, 0x15, 0xbb, 0xb3, 0x6f, 0xe6, 0x76, 0x50, 0x2d, 0x95, 0x80, 0xb5, 0x90, 0x5e, 0xb5, 0xc0, - 0xba, 0xde, 0xc7, 0x6b, 0xd0, 0x58, 0xbf, 0xc0, 0x15, 0x21, 0x12, 0x94, 0xba, 0xd3, 0x92, 0x71, - 0x1a, 0x6f, 0x40, 0xf7, 0x0c, 0xd5, 0x55, 0x3a, 0x81, 0x1c, 0x0f, 0x41, 0x2a, 0x26, 0xb8, 0x57, - 0x6b, 0x39, 0xed, 0x7a, 0xfc, 0x3b, 0xec, 0xc2, 0xfb, 0xd2, 0x77, 0x16, 0x4b, 0xdf, 0xf9, 0x5a, - 0xfa, 0xce, 0xcb, 0xca, 0x2f, 0x2d, 0x56, 0x7e, 0xe9, 0x73, 0xe5, 0x97, 0x1e, 0x6e, 0x28, 0xd3, - 0x93, 0x59, 0x12, 0xa6, 0x22, 0x8f, 0xac, 0xaf, 0x60, 0x23, 0x2c, 0xda, 0x11, 0x16, 0x6c, 0x8d, - 
0x05, 0x56, 0x59, 0xf4, 0x14, 0xed, 0xd8, 0xd7, 0xf3, 0x29, 0xa8, 0xa4, 0x6a, 0x34, 0x5d, 0x7c, - 0x07, 0x00, 0x00, 0xff, 0xff, 0x81, 0x35, 0xce, 0x59, 0x18, 0x02, 0x00, 0x00, +var fileDescriptor_89011ac07869c1d6 = []byte{ + // 350 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0x41, 0x4a, 0xc3, 0x40, + 0x14, 0x86, 0x9b, 0x6a, 0x5b, 0x1d, 0x28, 0x48, 0xe8, 0x22, 0x76, 0x11, 0x8a, 0xb8, 0x28, 0x42, + 0x12, 0xa8, 0x78, 0x00, 0x8b, 0x20, 0xc5, 0x22, 0x25, 0x62, 0x17, 0x6e, 0xca, 0x24, 0xf3, 0x9a, + 0x0e, 0x24, 0x33, 0x65, 0x66, 0x2a, 0xd6, 0x53, 0x78, 0x18, 0xf1, 0x0c, 0x2e, 0x8b, 0x2b, 0x97, + 0xd2, 0x5e, 0x44, 0x32, 0x13, 0x6d, 0x8b, 0xc5, 0xdd, 0x7b, 0xff, 0xfb, 0xfe, 0x3f, 0x61, 0x7e, + 0x74, 0xfd, 0x4c, 0x93, 0x08, 0x00, 0xa7, 0x29, 0xc5, 0x2c, 0x86, 0x80, 0x50, 0xa9, 0x04, 0x8d, + 0x66, 0x0a, 0x48, 0xcc, 0xb3, 0xa9, 0x51, 0x53, 0x20, 0x09, 0x88, 0xe0, 0x11, 0x18, 0xe1, 0x82, + 0xb2, 0x31, 0x2f, 0xc6, 0x51, 0x3e, 0xfb, 0x53, 0xc1, 0x15, 0xb7, 0x2f, 0xb6, 0x83, 0xfc, 0x7f, + 0x82, 0xfc, 0x75, 0x50, 0xf3, 0x38, 0xe6, 0x32, 0xe3, 0x72, 0xa4, 0x43, 0x02, 0xb3, 0x98, 0xc4, + 0x93, 0xb7, 0x32, 0x42, 0x43, 0x4d, 0xf6, 0xd8, 0x98, 0xdb, 0x4d, 0x74, 0x60, 0x7c, 0xbd, 0x2b, + 0xc7, 0x6a, 0x59, 0xed, 0x4a, 0xf8, 0xbb, 0xdb, 0x2e, 0x42, 0x66, 0xbe, 0xc5, 0x19, 0x38, 0xe5, + 0x96, 0xd5, 0x3e, 0x0c, 0x37, 0x14, 0xfb, 0x0c, 0x1d, 0xe5, 0x3f, 0x81, 0xd9, 0xbc, 0x0f, 0x09, + 0x4e, 0x35, 0xb5, 0xa7, 0xa9, 0x3f, 0xba, 0xdd, 0x41, 0x8d, 0x42, 0x1b, 0x08, 0x18, 0x83, 0x10, + 0x40, 0x34, 0xbf, 0xaf, 0xf9, 0x9d, 0xb7, 0xdc, 0x63, 0xbe, 0xd6, 0xc7, 0x8c, 0x50, 0x96, 0x0c, + 0x70, 0x02, 0xf7, 0x61, 0xdf, 0xa9, 0x18, 0xcf, 0xae, 0x9b, 0xdd, 0x41, 0xb5, 0x58, 0x00, 0x56, + 0x5c, 0x38, 0xd5, 0x1c, 0xeb, 0x3a, 0x1f, 0xaf, 0x5e, 0xa3, 0x78, 0x81, 0x4b, 0x42, 0x04, 0x48, + 0x79, 0xa7, 0x04, 0x65, 0x49, 0xf8, 0x03, 0xda, 0xa7, 0xa8, 0x2e, 0xe3, 0x09, 0x64, 0x78, 0x08, + 0x42, 0x52, 0xce, 0x9c, 0x5a, 0xcb, 0x6a, 0xd7, 0xc3, 0x6d, 
0xb1, 0x0b, 0xef, 0x4b, 0xd7, 0x5a, + 0x2c, 0x5d, 0xeb, 0x6b, 0xe9, 0x5a, 0x2f, 0x2b, 0xb7, 0xb4, 0x58, 0xb9, 0xa5, 0xcf, 0x95, 0x5b, + 0x7a, 0xb8, 0x49, 0xa8, 0x9a, 0xcc, 0x22, 0x3f, 0xe6, 0x59, 0x60, 0xfa, 0xf2, 0x76, 0x35, 0xef, + 0xad, 0x1b, 0xf3, 0x8a, 0xee, 0x9f, 0x36, 0xdb, 0x57, 0xf3, 0x29, 0xc8, 0xa8, 0xaa, 0x6b, 0x3a, + 0xff, 0x0e, 0x00, 0x00, 0xff, 0xff, 0x33, 0x11, 0xc0, 0x8d, 0x43, 0x02, 0x00, 0x00, } func (m *VendorInfo) Marshal() (dAtA []byte, err error) {